diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 606226523e..a5f764579a 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -13,35 +13,28 @@ on: jobs: Inbox: runs-on: ubuntu-latest - if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null && !contains(join(github.event.issue.labels.*.name, ', '), 'dependency-upgrade') && !contains(github.event.issue.title, 'Release ') steps: - name: Create or Update Issue Card - uses: peter-evans/create-or-update-project-card@v1.1.2 + uses: actions/add-to-project@v1.0.2 with: - project-name: 'Spring Data' - column-name: 'Inbox' - project-location: 'spring-projects' - token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} Pull-Request: runs-on: ubuntu-latest if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null steps: - name: Create or Update Pull Request Card - uses: peter-evans/create-or-update-project-card@v1.1.2 + uses: actions/add-to-project@v1.0.2 with: - project-name: 'Spring Data' - column-name: 'Review pending' - project-location: 'spring-projects' - issue-number: ${{ github.event.pull_request.number }} - token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} Feedback-Provided: runs-on: 
ubuntu-latest if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback') steps: - name: Update Project Card - uses: peter-evans/create-or-update-project-card@v1.1.2 + uses: actions/add-to-project@v1.0.2 with: - project-name: 'Spring Data' - column-name: 'Feedback provided' - project-location: 'spring-projects' - token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} diff --git a/.gitignore b/.gitignore index be372b6209..27b7a78896 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,9 @@ src/ant/.ant-targets-upload-dist.xml atlassian-ide-plugin.xml /.gradle/ /.idea/ -*.graphml \ No newline at end of file +*.graphml +build/ +node_modules +node +package-lock.json +.mvn/.develocity diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 0000000000..e0857eaa25 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,8 @@ + + + + io.spring.develocity.conventions + develocity-conventions-maven-extension + 0.0.22 + + diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..e27f6e8f5e --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,14 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED 
+--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED +--add-opens=java.base/java.util=ALL-UNNAMED +--add-opens=java.base/java.lang.reflect=ALL-UNNAMED +--add-opens=java.base/java.text=ALL-UNNAMED +--add-opens=java.desktop/java.awt.font=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 00d32aab1d..5f3193b363 100755 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1 +1,2 @@ -distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip \ No newline at end of file +#Thu Nov 07 09:47:19 CET 2024 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/CI.adoc b/CI.adoc index 4e95939a34..057100a955 100644 --- a/CI.adoc +++ b/CI.adoc @@ -16,7 +16,7 @@ All of these use cases are great reasons to essentially run what the CI server d IMPORTANT: To do this you must have Docker installed on your machine. -1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash` +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash` + This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. 
+ diff --git a/Jenkinsfile b/Jenkinsfile index 1eb84755a5..0e83b47e2f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,3 +1,9 @@ +def p = [:] +node { + checkout scm + p = readProperties interpolate: true, file: 'ci/pipeline.properties' +} + pipeline { agent none @@ -14,49 +20,58 @@ pipeline { stages { stage("Docker images") { parallel { - stage('Publish JDK 8 + MongoDB 4.0') { + stage('Publish JDK (Java 17) + MongoDB 6.0') { when { - changeset "ci/openjdk8-mongodb-4.0/**" + anyOf { + changeset "ci/openjdk17-mongodb-6.0/**" + changeset "ci/pipeline.properties" + } } agent { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } steps { script { - def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0.23", "ci/openjdk8-mongodb-4.0/") - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { image.push() } } } } - stage('Publish JDK 8 + MongoDB 4.4') { + stage('Publish JDK (Java 17) + MongoDB 7.0') { when { - changeset "ci/openjdk8-mongodb-4.4/**" - } - agent { label 'data' } - options { timeout(time: 30, unit: 'MINUTES') } + anyOf { + changeset "ci/openjdk17-mongodb-7.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } steps { script { - def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.4.4", "ci/openjdk8-mongodb-4.4/") - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + def image = docker.build("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.7.0.version']} ci/openjdk17-mongodb-7.0/") + docker.withRegistry(p['docker.registry'], 
p['docker.credentials']) { image.push() } } } } - stage('Publish JDK 16 + MongoDB 4.4') { + stage('Publish JDK (Java.next) + MongoDB 8.0') { when { - changeset "ci/openjdk16-mongodb-4.4/**" + anyOf { + changeset "ci/openjdk17-mongodb-8.0/**" + changeset "ci/pipeline.properties" + } } agent { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } steps { script { - def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/") - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + def image = docker.build("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.8.0.version']} ci/openjdk23-mongodb-8.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { image.push() } } @@ -65,10 +80,11 @@ pipeline { } } - stage("test: baseline (jdk8)") { + stage("test: baseline (main)") { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -77,18 +93,16 @@ pipeline { } options { timeout(time: 30, unit: 'MINUTES') } environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") } steps { script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' - sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' - sh 'sleep 10' - sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' - sh 'sleep 15' - 
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" } } } @@ -97,78 +111,51 @@ pipeline { stage("Test other configurations") { when { + beforeAgent(true) allOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } parallel { - stage("test: mongodb 4.0 (jdk8)") { - agent { - label 'data' - } - options { timeout(time: 30, unit: 'MINUTES') } - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - steps { - script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' - sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' - sh 'sleep 10' - sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' - sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' - } - } - } - } - } - - stage("test: mongodb 4.4 (jdk8)") { + stage("test: MongoDB 7.0 (main)") { agent { label 'data' } options { 
timeout(time: 30, unit: 'MINUTES') } environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") } steps { script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('springci/spring-data-openjdk8-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' - sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' - sh 'sleep 10' - sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' - sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" } } } } } - stage("test: baseline (jdk16)") { + stage("test: MongoDB 8.0") { agent { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") } steps { script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - 
docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' - sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' - sh 'sleep 10' - sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' - sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" } } } @@ -179,8 +166,9 @@ pipeline { stage('Release to artifactory') { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -188,52 +176,25 @@ pipeline { label 'data' } options { timeout(time: 20, unit: 'MINUTES') } - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") } - steps { script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml 
-Pci,artifactory ' + - '-Dartifactory.server=https://repo.spring.io ' + + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + "-Dartifactory.username=${ARTIFACTORY_USR} " + "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.staging-repository=libs-snapshot-local " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + "-Dartifactory.build-name=spring-data-mongodb " + - "-Dartifactory.build-number=${BUILD_NUMBER} " + - '-Dmaven.test.skip=true clean deploy -U -B' - } - } - } - } - } - - stage('Publish documentation') { - when { - branch 'main' - } - agent { - label 'data' - } - options { timeout(time: 20, unit: 'MINUTES') } - - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - - steps { - script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' + - '-Dartifactory.server=https://repo.spring.io ' + - "-Dartifactory.username=${ARTIFACTORY_USR} " + - "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.distribution-repository=temp-private-local " + - '-Dmaven.test.skip=true clean deploy -U -B' + "-Dartifactory.build-number=spring-data-mongodb-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb " + + "-Dmaven.test.skip=true clean deploy -U -B" } } } @@ -244,10 +205,6 @@ pipeline { post { changed { script { - slackSend( - color: 
(currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger', - channel: '#spring-data-dev', - message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}") emailext( subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", mimeType: 'text/html', diff --git a/README.adoc b/README.adoc index e80d6ac4ef..61b956fbfc 100644 --- a/README.adoc +++ b/README.adoc @@ -1,17 +1,19 @@ -image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] +image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] -= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] += Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring Data MongoDB"] -The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. 
+The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer. +[[code-of-conduct]] == Code of Conduct This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. +[[getting-started]] == Getting Started Here is a quick teaser of an application using Spring Data Repositories in Java: @@ -59,6 +61,7 @@ class ApplicationConfig extends AbstractMongoClientConfiguration { } ---- +[[maven-configuration]] === Maven configuration Add the Maven dependency: @@ -68,200 +71,148 @@ Add the Maven dependency: org.springframework.data spring-data-mongodb - ${version}.RELEASE + ${version} ---- -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository +and declare the appropriate dependency version. 
[source,xml] ---- org.springframework.data spring-data-mongodb - ${version}.BUILD-SNAPSHOT + ${version}-SNAPSHOT - spring-libs-snapshot + spring-snapshot Spring Snapshot Repository - https://repo.spring.io/libs-snapshot + https://repo.spring.io/snapshot ---- -== Upgrading from 2.x +[[upgrading]] +== Upgrading -The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions. -Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter. +Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki]. +Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to. -=== XML Namespace - -.Changed XML Namespace Elements and Attributes: -|=== -Element / Attribute | 2.x | 3.x - -| `` -| Used to create a `com.mongodb.MongoClient` -| Now exposes a `com.mongodb.client.MongoClient` - -| `` -| Was a comma delimited list of replica set members (host/port) -| Now defines the replica set name. + -Use `` instead - -| `` -| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY -| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY -|=== - -.Removed XML Namespace Elements and Attributes: -|=== -Element / Attribute | Replacement in 3.x | Comment - -| `` -| `` -| Referencing a `com.mongodb.client.MongoClient`. - -| `` -| `` -| Single authentication data instead of list. - -| `` -| `` -| See `com.mongodb.MongoClientSettings` for details. -|=== +[[getting-help]] +== Getting Help -.New XML Namespace Elements and Attributes: -|=== -Element | Comment +Having trouble with Spring Data? We’d love to help! 
-| `` -| Replacement for `` +* Check the +https://docs.spring.io/spring-data/mongodb/reference/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs] +* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. +If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. +* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features. +* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`]. +* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues]. -| `` -| Replacement for `uri` and `client-uri`. +[[reporting-issues]] +== Reporting Issues -| `` -| Replacement for `uri` and `client-uri`. +Spring Data uses Github as issue tracking system to record bugs and feature requests. +If you want to raise an issue, please follow the recommendations below: -| `` -| Namespace element for `com.mongodb.MongoClientSettings`. +* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem. +* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue]. +* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc. +* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++. 
-|=== +[[guides]] +== Guides -=== Java Configuration +The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step: -.Java API changes -|=== -Type | Comment +* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories. +* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories. -| `MongoClientFactoryBean` -| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` + -Uses `MongoClientSettings` instead of `MongoClientOptions`. +[[examples]] +== Examples -| `MongoDataIntegrityViolationException` -| Uses `WriteConcernResult` instead of `WriteResult`. +* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail. -| `BulkOperationException` -| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError` +[[building-from-source]] +== Building from Source -| `ReactiveMongoClientFactoryBean` -| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings` +You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io] +and accessible from Maven using the Maven configuration noted <>. -| `ReactiveMongoClientSettingsFactoryBean` -| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings` -|=== +NOTE: Configuration for Gradle is similar to Maven. -.Removed Java API: -|=== -2.x | Replacement in 3.x | Comment +The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io]. 
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link] +to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link] +to build a reactive one. -| `MongoClientOptionsFactoryBean` -| `MongoClientSettingsFactoryBean` -| Creating a `com.mongodb.MongoClientSettings`. +However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper] +and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]). -| `AbstractMongoConfiguration` -| `AbstractMongoClientConfiguration` + -(Available since 2.1) -| Using `com.mongodb.client.MongoClient`. +In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download] +and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution]. -| `MongoDbFactory#getLegacyDb()` -| - -| - +Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to +your MongoDB installation directory (e.g. `MONGODB_HOME`). -| `SimpleMongoDbFactory` -| `SimpleMongoClientDbFactory` + -(Available since 2.1) -| +To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set] +is required. -| `MapReduceOptions#getOutputType()` -| `MapReduceOptions#getMapReduceAction()` -| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`. 
+To run the MongoDB server enter the following command from a command-line: -| `Meta\|Query` maxScan & snapshot -| -| -|=== +[source,bash] +---- +$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0 +... +"msg":"Successfully connected to host" +---- -=== Other Changes +Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_". -==== UUID Types +Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set +the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`). -The MongoDB UUID representation can now be configured with different formats. -This has to be done via `MongoClientSettings` as shown in the snippet below. +You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started. +To initialize the replica set, start a mongo client: -.UUID Codec Configuration -==== -[source,java] +[source,bash] ---- -static class Config extends AbstractMongoClientConfiguration { - - @Override - public void configureClientSettings(MongoClientSettings.Builder builder) { - builder.uuidRepresentation(UuidRepresentation.STANDARD); - } - - // ... -} +$ $MONGODB_HOME/bin/mongo +MongoDB server version: 6.0.0 +... ---- -==== - -== Getting Help - -Having trouble with Spring Data? We’d love to help! -* Check the -https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs]. -* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. -If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. 
-* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features. -* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`]. -You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter]. -* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues]. +Then enter the following command: -== Reporting Issues +[source,bash] +---- +mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] }) +---- -Spring Data uses Github as issue tracking system to record bugs and feature requests. -If you want to raise an issue, please follow the recommendations below: +Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`. +In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation): -* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem. -* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue]. -* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc. -* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++. +[source,bash] +---- +$ ulimit -n 32768 +---- -== Building from Source +You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately. 
-You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]. -You also need JDK 1.8. +Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command: [source,bash] ---- $ ./mvnw clean install ---- -If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above]. +If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above]. -_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._ +_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign +the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._ === Building reference documentation @@ -269,22 +220,12 @@ Building the documentation builds also the project without running tests. [source,bash] ---- - $ ./mvnw clean install -Pdistribute + $ ./mvnw clean install -Pantora ---- -The generated documentation is available from `target/site/reference/html/index.html`. - -== Guides - -The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step: - -* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories. -* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories. 
- -== Examples - -* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail. +The generated documentation is available from `target/antora/site/index.html`. +[[license]] == License Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license]. diff --git a/ci/README.adoc b/ci/README.adoc index 3e65271901..f1c11d8496 100644 --- a/ci/README.adoc +++ b/ci/README.adoc @@ -10,7 +10,7 @@ All of these use cases are great reasons to essentially run what Concourse does IMPORTANT: To do this you must have Docker installed on your machine. -1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash` +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` + This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. + @@ -23,7 +23,7 @@ Since the container is binding to your source, you can make edits from your IDE If you need to test the `build.sh` script, do this: 1. `mkdir /tmp/spring-data-mongodb-artifactory` -2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash` +2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` + This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary artifactory output directory at `spring-data-mongodb-artifactory`. 
@@ -36,4 +36,4 @@ IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts and deliver them to artifactory. -NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. \ No newline at end of file +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/ci/openjdk11-mongodb-4.4/Dockerfile b/ci/openjdk11-mongodb-4.4/Dockerfile deleted file mode 100644 index 6c94ac38ff..0000000000 --- a/ci/openjdk11-mongodb-4.4/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM adoptopenjdk/openjdk11:latest - -ENV TZ=Etc/UTC -ENV DEBIAN_FRONTEND=noninteractive - -RUN set -eux; \ - apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ - apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ - echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ - echo ${TZ} > /etc/timezone; - -RUN apt-get update ; \ - apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \ - apt-get clean; \ - rm -rf /var/lib/apt/lists/*; diff --git a/ci/openjdk16-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile deleted file mode 100644 index 7a1e47cf00..0000000000 --- a/ci/openjdk16-mongodb-4.4/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM adoptopenjdk/openjdk16:latest - -ENV TZ=Etc/UTC -ENV DEBIAN_FRONTEND=noninteractive - -RUN set -eux; \ - apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ - apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ - echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 
multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ - echo ${TZ} > /etc/timezone; - -RUN apt-get update ; \ - apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \ - apt-get clean; \ - rm -rf /var/lib/apt/lists/*; diff --git a/ci/openjdk17-mongodb-6.0/Dockerfile b/ci/openjdk17-mongodb-6.0/Dockerfile new file mode 100644 index 0000000000..fd2580e23a --- /dev/null +++ b/ci/openjdk17-mongodb-6.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. +ARG MONGODB + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive +ENV MONGO_VERSION=${MONGODB} + +RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \ + sed -i -e 's/http/https/g' /etc/apt/sources.list && \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \ + # MongoDB 6.0 release signing key + wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \ + # Needed when MongoDB creates a 6.0 folder. + echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \ + echo ${TZ} > /etc/timezone + +RUN apt-get update && \ + apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/ci/openjdk17-mongodb-7.0/Dockerfile b/ci/openjdk17-mongodb-7.0/Dockerfile new file mode 100644 index 0000000000..5701ab9fbc --- /dev/null +++ b/ci/openjdk17-mongodb-7.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. 
+ARG MONGODB + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive +ENV MONGO_VERSION=${MONGODB} + +RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \ + sed -i -e 's/http/https/g' /etc/apt/sources.list && \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \ + # MongoDB 6.0 release signing key + wget -qO - https://www.mongodb.org/static/pgp/server-7.0.asc | apt-key add - && \ + # Needed when MongoDB creates a 7.0 folder. + echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list && \ + echo ${TZ} > /etc/timezone + +RUN apt-get update && \ + apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/ci/openjdk23-mongodb-8.0/Dockerfile b/ci/openjdk23-mongodb-8.0/Dockerfile new file mode 100644 index 0000000000..0cb80001bf --- /dev/null +++ b/ci/openjdk23-mongodb-8.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. 
+ARG MONGODB + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive +ENV MONGO_VERSION=${MONGODB} + +RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \ + sed -i -e 's/http/https/g' /etc/apt/sources.list && \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \ + # MongoDB 8.0 release signing key + wget -qO - https://www.mongodb.org/static/pgp/server-8.0.asc | apt-key add - && \ + # Needed when MongoDB creates a 8.0 folder. + echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu noble/mongodb-org/8.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-8.0.list && \ + echo ${TZ} > /etc/timezone + +RUN apt-get update && \ + apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/ci/openjdk8-mongodb-4.0/Dockerfile b/ci/openjdk8-mongodb-4.0/Dockerfile deleted file mode 100644 index e05068ab32..0000000000 --- a/ci/openjdk8-mongodb-4.0/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM adoptopenjdk/openjdk8:latest - -ENV TZ=Etc/UTC -ENV DEBIAN_FRONTEND=noninteractive - -RUN RUN set -eux; \ - apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ - apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \ - echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \ - echo ${TZ} > /etc/timezone; - -RUN apt-get update ; \ - apt-get install -y mongodb-org=4.0.23 mongodb-org-server=4.0.23 mongodb-org-shell=4.0.23 mongodb-org-mongos=4.0.23 mongodb-org-tools=4.0.23 ; \ - apt-get clean; \ - rm -rf 
/var/lib/apt/lists/*; diff --git a/ci/openjdk8-mongodb-4.4/Dockerfile b/ci/openjdk8-mongodb-4.4/Dockerfile deleted file mode 100644 index 79774dd269..0000000000 --- a/ci/openjdk8-mongodb-4.4/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM adoptopenjdk/openjdk8:latest - -ENV TZ=Etc/UTC -ENV DEBIAN_FRONTEND=noninteractive - -RUN set -eux; \ - apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ - apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ - echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ - echo ${TZ} > /etc/timezone; - -RUN apt-get update ; \ - ln -T /bin/true /usr/bin/systemctl ; \ - apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \ - rm /usr/bin/systemctl ; \ - apt-get clean; \ - rm -rf /var/lib/apt/lists/*; diff --git a/ci/pipeline.properties b/ci/pipeline.properties new file mode 100644 index 0000000000..8dd2295acc --- /dev/null +++ b/ci/pipeline.properties @@ -0,0 +1,32 @@ +# Java versions +java.main.tag=17.0.15_6-jdk-focal +java.next.tag=24.0.1_9-jdk-noble + +# Docker container images - standard +docker.java.main.image=library/eclipse-temurin:${java.main.tag} +docker.java.next.image=library/eclipse-temurin:${java.next.tag} + +# Supported versions of MongoDB +docker.mongodb.6.0.version=6.0.23 +docker.mongodb.7.0.version=7.0.20 +docker.mongodb.8.0.version=8.0.9 + +# Supported versions of Redis +docker.redis.6.version=6.2.13 +docker.redis.7.version=7.2.4 +docker.valkey.8.version=8.1.1 + +# Docker environment settings +docker.java.inside.basic=-v $HOME:/tmp/jenkins-home +docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home + +# Credentials +docker.registry= +docker.credentials=hub.docker.com-springbuildmaster 
+docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com +docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token +artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local +develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/start-replica.sh b/ci/start-replica.sh new file mode 100755 index 0000000000..9124976f39 --- /dev/null +++ b/ci/start-replica.sh @@ -0,0 +1,6 @@ +#!/bin/sh +mkdir -p /tmp/mongodb/db /tmp/mongodb/log +mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log & +sleep 10 +mongosh --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});" +sleep 15 diff --git a/pom.xml b/pom.xml index a6d5da9170..962ae73ffe 100644 --- a/pom.xml +++ b/pom.xml @@ -5,17 +5,17 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 4.5.0-SNAPSHOT pom Spring Data MongoDB MongoDB support for Spring Data - https://projects.spring.io/spring-data-mongodb + https://spring.io/projects/spring-data-mongodb org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 3.5.0-SNAPSHOT @@ -26,9 +26,8 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT - 4.2.3 - ${mongo} + 3.5.0-SNAPSHOT + 5.5.0 1.19 @@ -112,52 +111,66 @@ + + scm:git:https://github.com/spring-projects/spring-data-mongodb.git + scm:git:git@github.com:spring-projects/spring-data-mongodb.git + https://github.com/spring-projects/spring-data-mongodb + + + + GitHub + https://github.com/spring-projects/spring-data-mongodb/issues + + - benchmarks - - spring-data-mongodb - spring-data-mongodb-distribution - spring-data-mongodb-benchmarks - + jmh + + + jitpack.io + https://jitpack.io + + + + + mongo-4.x + + 4.11.1 + 1.8.0 + + - - - - org.mongodb - 
mongodb-driver-core - ${mongo} - - + + + + + org.mongodb + mongodb-driver-bom + ${mongo} + pom + import + + + + - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + - sonatype-libs-snapshot - https://oss.sonatype.org/content/repositories/snapshots - - false - - - true - + spring-milestone + https://repo.spring.io/milestone - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - spring-libs-milestone - https://repo.spring.io/libs-milestone - - - diff --git a/spring-data-mongodb-benchmarks/README.md b/spring-data-mongodb-benchmarks/README.md deleted file mode 100644 index ca14cc11a9..0000000000 --- a/spring-data-mongodb-benchmarks/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# Benchmarks - -Benchmarks are based on [JMH](https://openjdk.java.net/projects/code-tools/jmh/). - -# Running Benchmarks - -Running benchmarks is disabled by default and can be activated via the `benchmarks` profile. -To run the benchmarks with default settings use. - -```bash -mvn -P benchmarks clean test -``` - -A basic report will be printed to the CLI. - -```bash -# Run complete. Total time: 00:00:15 - -Benchmark Mode Cnt Score Error Units -MappingMongoConverterBenchmark.readObject thrpt 10 1920157,631 ± 64310,809 ops/s -MappingMongoConverterBenchmark.writeObject thrpt 10 782732,857 ± 53804,130 ops/s -``` - -## Running all Benchmarks of a specific class - -To run all Benchmarks of a specific class, just provide its simple class name via the `benchmark` command line argument. - -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark -``` - -## Running a single Benchmark - -To run a single Benchmark provide its containing class simple name followed by `#` and the method name via the `benchmark` command line argument. 
- -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark#readObjectWith2Properties -``` - -# Saving Benchmark Results - -A detailed benchmark report is stored in JSON format in the `/target/reports/performance` directory. -To store the report in a different location use the `benchmarkReportDir` command line argument. - -## MongoDB - -Results can be directly piped to MongoDB by providing a valid [Connection String](https://docs.mongodb.com/manual/reference/connection-string/) via the `publishTo` command line argument. - -```bash -mvn -P benchmarks clean test -D publishTo=mongodb://127.0.0.1:27017 -``` - -NOTE: If the uri does not explicitly define a database the default `spring-data-mongodb-benchmarks` is used. - -## HTTP Endpoint - -The benchmark report can also be posted as `application/json` to an HTTP Endpoint by providing a valid URl via the `publishTo` command line argument. - -```bash -mvn -P benchmarks clean test -D publishTo=http://127.0.0.1:8080/capture-benchmarks -``` - -# Customizing Benchmarks - -Following options can be set via command line. 
- -Option | Default Value ---- | --- -warmupIterations | 10 -warmupTime | 1 (seconds) -measurementIterations | 10 -measurementTime | 1 (seconds) -forks | 1 -benchmarkReportDir | /target/reports/performance (always relative to project root dir) -benchmark | .* (single benchmark via `classname#benchmark`) -publishTo | \[not set\] (mongodb-uri or http-endpoint) \ No newline at end of file diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml deleted file mode 100644 index 0033bd11d5..0000000000 --- a/spring-data-mongodb-benchmarks/pom.xml +++ /dev/null @@ -1,112 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 3.3.0-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-benchmarks - jar - - Spring Data MongoDB - Microbenchmarks - - - - true - - - - - - ${project.groupId} - spring-data-mongodb - ${project.version} - - - - junit - junit - ${junit} - compile - - - - org.openjdk.jmh - jmh-core - ${jmh.version} - - - - org.openjdk.jmh - jmh-generator-annprocess - ${jmh.version} - provided - - - - - - - - benchmarks - - false - - - - - - - - pl.project13.maven - git-commit-id-plugin - 2.2.2 - - - - revision - - - - - - maven-jar-plugin - - - default-jar - never - - - - - maven-surefire-plugin - - false - ${project.build.sourceDirectory} - ${project.build.outputDirectory} - - **/AbstractMicrobenchmark.java - **/*$*.class - **/generated/*.class - - - **/*Benchmark* - - - ${project.build.directory}/reports/performance - ${project.version} - ${git.dirty} - ${git.commit.id} - ${git.branch} - - - - - - diff --git a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml b/spring-data-mongodb-benchmarks/src/main/resources/logback.xml deleted file mode 100644 index bccb2dc4fa..0000000000 --- a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - \ No newline at end of file diff --git 
a/spring-data-mongodb-distribution/package.json b/spring-data-mongodb-distribution/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/spring-data-mongodb-distribution/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" + } +} diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..58c63dfc97 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -1,6 +1,7 @@ - + 4.0.0 @@ -14,30 +15,59 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml ${basedir}/.. - SDMONGO + ${project.basedir}/../src/main/antora/antora-playbook.yml + + + ${project.basedir}/../src/main/antora/resources/antora-resources + true + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.4.0 + + + timestamp-property + + timestamp-property + + validate + + current.year + yyyy + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + + resources + + + + org.apache.maven.plugins maven-assembly-plugin - org.asciidoctor - asciidoctor-maven-plugin - - - ${mongo.reactivestreams} - ${reactor} - - + org.antora + antora-maven-plugin diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..b842a2def3 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -11,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml @@ -25,6 +27,30 @@ + + + org.mongodb + mongodb-driver-core + + + + org.mongodb + mongodb-driver-sync + true + + + + org.mongodb + mongodb-driver-reactivestreams + true + + + + org.mongodb + mongodb-crypt + true + + 
org.springframework @@ -94,22 +120,6 @@ true - - - - org.mongodb - mongodb-driver-sync - ${mongo} - true - - - - org.mongodb - mongodb-driver-reactivestreams - ${mongo.reactivestreams} - true - - io.projectreactor reactor-core @@ -123,24 +133,10 @@ - io.reactivex - rxjava - ${rxjava} - true - - - - io.reactivex - rxjava-reactive-streams - ${rxjava-reactive-streams} - true - - - - io.reactivex.rxjava2 - rxjava - ${rxjava2} - true + org.awaitility + awaitility + ${awaitility} + test @@ -152,13 +148,6 @@ - - org.apache.geronimo.specs - geronimo-jcdi_2.0_spec - 1.0.1 - test - - javax.interceptor javax.interceptor-api @@ -167,17 +156,17 @@ - javax.enterprise - cdi-api + jakarta.enterprise + jakarta.enterprise.cdi-api ${cdi} provided true - javax.annotation - javax.annotation-api - ${javax-annotation-api} + jakarta.annotation + jakarta.annotation-api + ${jakarta-annotation-api} test @@ -190,8 +179,8 @@ - javax.validation - validation-api + jakarta.validation + jakarta.validation-api ${validation} true @@ -204,44 +193,44 @@ - org.hibernate - hibernate-validator - 5.4.3.Final - test + io.micrometer + micrometer-observation + true - org.glassfish - javax.el - 3.0.1-b11 - test + io.micrometer + micrometer-tracing + true - joda-time - joda-time - ${jodatime} + org.hibernate.validator + hibernate-validator + 7.0.1.Final test - org.threeten - threetenbp - ${threetenbp} + jakarta.el + jakarta.el-api + 4.0.0 + provided true - com.fasterxml.jackson.core - jackson-databind + org.glassfish + jakarta.el + 4.0.2 + provided true - org.slf4j - jul-to-slf4j - ${slf4j} - test + com.fasterxml.jackson.core + jackson-databind + true @@ -279,9 +268,29 @@ - javax.transaction - jta - 1.1 + org.junit.platform + junit-platform-launcher + test + + + + org.testcontainers + junit-jupiter + ${testcontainers} + test + + + + org.testcontainers + mongodb + ${testcontainers} + test + + + + jakarta.transaction + jakarta.transaction-api + 2.0.0 test @@ -312,17 +321,62 @@ io.mockk - mockk + mockk-jvm 
${mockk} test + + io.micrometer + micrometer-test + test + + + com.github.tomakehurst + wiremock-jre8-standalone + + + + + io.micrometer + micrometer-tracing-test + test + + + + io.micrometer + micrometer-tracing-integration-test + test + + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh} + + + + + com.mysema.maven apt-maven-plugin @@ -341,8 +395,11 @@ test-process - target/generated-test-sources - org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + target/generated-test-sources + + + org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + @@ -362,7 +419,11 @@ **/ReactivePerformanceTests.java - src/test/resources/logging.properties + ${mongo} + ${env.MONGO_VERSION} + + src/test/resources/logging.properties + true diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java similarity index 95% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java index 79e64ff4be..3b0c72cc0b 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,6 +19,7 @@ import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.beans.factory.annotation.Value; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; @@ -27,8 +28,8 @@ import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoCollection; /** @@ -56,7 +57,7 @@ public class ProjectionsBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); source = new Person(); @@ -83,7 +84,7 @@ public void setUp() { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java similarity index 78% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java index 83eeec9eab..53f64f2a50 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. 
+ * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,8 +18,6 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - import java.util.ArrayList; import java.util.List; @@ -29,14 +27,15 @@ import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl @@ -55,7 +54,7 @@ public class DbRefMappingBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); List refObjects = new ArrayList<>(); @@ -80,7 +79,7 @@ public void setUp() throws Exception { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -94,18 +93,56 @@ public ObjectWithDBRef readMultipleDbRefs() { return template.findOne(queryObjectWithDBRefList, ObjectWithDBRef.class); } - @Data static class ObjectWithDBRef { private @Id ObjectId id; private @DBRef RefObject ref; private @DBRef List refList; + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public RefObject getRef() { + return ref; + } + + public void setRef(RefObject ref) { + 
this.ref = ref; + } + + public List getRefList() { + return refList; + } + + public void setRefList(List refList) { + this.refList = refList; + } } - @Data static class RefObject { private @Id String id; private String someValue; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSomeValue() { + return someValue; + } + + public void setSomeValue(String someValue) { + this.someValue = someValue; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java similarity index 74% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java index 0941d9c0ac..00d2e7034a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,11 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -29,25 +24,29 @@ import org.bson.Document; import org.bson.types.ObjectId; +import org.junit.platform.commons.annotation.Testable; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.util.ObjectUtils; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl */ @State(Scope.Benchmark) +@Testable public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { private static final String DB_NAME = "mapping-mongo-converter-benchmark"; @@ -64,13 +63,13 @@ public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); this.mappingContext = new MongoMappingContext(); this.mappingContext.setInitialEntitySet(Collections.singleton(Customer.class)); this.mappingContext.afterPropertiesSet(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoDbFactory(client, 
DB_NAME)); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoClientDatabaseFactory(client, DB_NAME)); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); this.converter.setCustomConversions(new MongoCustomConversions(Collections.emptyList())); @@ -116,7 +115,7 @@ public void setUp() throws Exception { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -151,22 +150,36 @@ public Object writeObjectWithListAndMapsOfComplexType() { return sink; } - @Getter - @RequiredArgsConstructor static class Customer { private @Id ObjectId id; private final String firstname, lastname; private final Address address; + + public Customer(String firstname, String lastname, Address address) { + this.firstname = firstname; + this.lastname = lastname; + this.address = address; + } } - @Getter - @AllArgsConstructor static class Address { private String zipCode, city; + + public Address(String zipCode, String city) { + this.zipCode = zipCode; + this.city = city; + } + + public String getZipCode() { + return zipCode; + } + + public String getCity() { + return city; + } } - @Data static class SlightlyMoreComplexObject { @Id String id; @@ -177,5 +190,59 @@ static class SlightlyMoreComplexObject { Customer customer; List
addressList; Map customerMap; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SlightlyMoreComplexObject)) { + return false; + } + SlightlyMoreComplexObject that = (SlightlyMoreComplexObject) o; + if (intOne != that.intOne) { + return false; + } + if (intTwo != that.intTwo) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringOne, that.stringOne)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringTwo, that.stringTwo)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(renamedField, that.renamedField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(location, that.location)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(customer, that.customer)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(addressList, that.addressList)) { + return false; + } + return ObjectUtils.nullSafeEquals(customerMap, that.customerMap); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + intOne; + result = 31 * result + intTwo; + result = 31 * result + ObjectUtils.nullSafeHashCode(stringOne); + result = 31 * result + ObjectUtils.nullSafeHashCode(stringTwo); + result = 31 * result + ObjectUtils.nullSafeHashCode(renamedField); + result = 31 * result + ObjectUtils.nullSafeHashCode(location); + result = 31 * result + ObjectUtils.nullSafeHashCode(customer); + result = 31 * result + ObjectUtils.nullSafeHashCode(addressList); + result = 31 * result + ObjectUtils.nullSafeHashCode(customerMap); + return result; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java similarity index 97% rename from 
spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java index 5c99c07fa6..615500904d 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,7 +21,6 @@ import java.util.Collection; import java.util.Date; -import org.junit.Test; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Scope; @@ -33,6 +32,7 @@ import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.TimeValue; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.CollectionUtils; import org.springframework.util.ResourceUtils; @@ -41,8 +41,8 @@ /** * @author Christoph Strobl */ -@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS) -@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS) +@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS, time = 2) +@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS, time = 2) @Fork(AbstractMicrobenchmark.FORKS) @State(Scope.Thread) public class AbstractMicrobenchmark { @@ -62,7 +62,6 @@ public class AbstractMicrobenchmark { * @throws Exception * @see #options(String) */ - @Test public void run() throws Exception { String includes = includes(); @@ -322,7 +321,7 @@ private void 
publishResults(Collection results) { try { ResultsWriter.forUri(uri).write(results); } catch (Exception e) { - System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e)); + System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e)); } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java similarity index 91% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java index 8d3e57eecc..af56908755 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; +import java.io.IOException; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; @@ -43,13 +43,20 @@ class HttpResultsWriter implements ResultsWriter { } @Override - @SneakyThrows public void write(Collection results) { if (CollectionUtils.isEmpty(results)) { return; } + try { + doWrite(results); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void doWrite(Collection results) throws IOException { StandardEnvironment env = new StandardEnvironment(); String projectVersion = env.getProperty("project.version", "unknown"); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java similarity index 86% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java index 4167d3c01d..2114d2a06a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,15 +21,16 @@ import org.bson.Document; import org.openjdk.jmh.results.RunResult; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBObject; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; +import com.mongodb.ConnectionString; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoDatabase; -import com.mongodb.util.JSON; /** * MongoDB specific {@link ResultsWriter} implementation. @@ -56,13 +57,14 @@ public void write(Collection results) { String gitDirty = env.getProperty("git.dirty", "no"); String gitCommitId = env.getProperty("git.commit.id", "unknown"); - MongoClientURI uri = new MongoClientURI(this.uri); - MongoClient client = new MongoClient(uri); + ConnectionString connectionString = new ConnectionString(this.uri); + MongoClient client = MongoClients.create(this.uri); - String dbName = StringUtils.hasText(uri.getDatabase()) ? uri.getDatabase() : "spring-data-mongodb-benchmarks"; + String dbName = StringUtils.hasText(connectionString.getDatabase()) ? 
connectionString.getDatabase() + : "spring-data-mongodb-benchmarks"; MongoDatabase db = client.getDatabase(dbName); - for (BasicDBObject dbo : (List) JSON.parse(ResultsWriter.jsonifyResults(results))) { + for (Document dbo : (List) Document.parse(ResultsWriter.jsonifyResults(results))) { String collectionName = extractClass(dbo.get("benchmark").toString()); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java similarity index 89% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java index c55e10daaa..95da1750bc 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; - import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.nio.charset.StandardCharsets; @@ -54,13 +52,12 @@ static ResultsWriter forUri(String uri) { * * @param results * @return json string representation of results. 
- * @see org.openjdk.jmh.results.format.JSONResultFormat */ - @SneakyThrows static String jsonifyResults(Collection results) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, "UTF-8")).writeOut(results); + ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, StandardCharsets.UTF_8)) + .writeOut(results); return new String(baos.toByteArray(), StandardCharsets.UTF_8); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java index 982f683d53..1f6875c080 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -1,5 +1,5 @@ /* - * Copyright 2021 the original author or authors. + * Copyright 2021-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,15 +23,16 @@ import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json}) * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter - * binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via + * binding of placeholders like {@code ?0} is delayed upon first call on the target {@link Document} via * {@link #toDocument()}. - *

+ *
* *

  * $toUpper : $name                -> { '$toUpper' : '$name' }
@@ -45,6 +46,7 @@
  * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
  *
  * @author Christoph Strobl
+ * @author Giacomo Baso
  * @since 3.2
  */
 public class BindableMongoExpression implements MongoExpression {
@@ -77,6 +79,8 @@ public BindableMongoExpression(String expression, @Nullable Object[] args) {
 	public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
 			@Nullable Object[] args) {
 
+		Assert.notNull(expression, "Expression must not be null");
+
 		this.expressionString = expression;
 		this.codecRegistryProvider = codecRegistryProvider;
 		this.args = args;
@@ -103,19 +107,11 @@ public BindableMongoExpression bind(Object... args) {
 		return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
 	}
 
-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.MongoExpression#toDocument()
-	 */
 	@Override
 	public Document toDocument() {
 		return target.get();
 	}
 
-	/*
-	 * (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
 	@Override
 	public String toString() {
 		return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
@@ -143,10 +139,11 @@ private Document parse() {
 
 	private static String wrapJsonIfNecessary(String json) {
 
-		if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
+		if(!StringUtils.hasText(json)) {
 			return json;
 		}
 
-		return "{" + json + "}";
+		String raw = json.trim();
+		return (raw.startsWith("{") && raw.endsWith("}")) ? raw : "{%s}".formatted(raw);
 	}
 }
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
index 03fadff0fe..b36382a58e 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015-2021 the original author or authors.
+ * Copyright 2015-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
index fae652c7f4..53acf65470 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2018-2021 the original author or authors.
+ * Copyright 2018-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
index 60b2027763..53515f9fcd 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -62,7 +62,7 @@ default boolean hasCodecFor(Class type) {
 	 */
 	default  Optional> getCodecFor(Class type) {
 
-		Assert.notNull(type, "Type must not be null!");
+		Assert.notNull(type, "Type must not be null");
 
 		try {
 			return Optional.of(getCodecRegistry().get(type));
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java
new file mode 100644
index 0000000000..c07e2dbe4a
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2024-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.springframework.lang.Nullable;
+
+/**
+ * Default implementation of {@link MongoTransactionOptions} using {@literal mongo:} as {@link #getLabelPrefix() label
+ * prefix} creating {@link SimpleMongoTransactionOptions} out of a given argument {@link Map}. Uses
+ * {@link SimpleMongoTransactionOptions#KNOWN_KEYS} to validate entries in arguments to resolve and errors on unknown
+ * entries.
+ *
+ * @author Christoph Strobl
+ * @since 4.3
+ */
+enum DefaultMongoTransactionOptionsResolver implements MongoTransactionOptionsResolver {
+
+	INSTANCE;
+
+	private static final String PREFIX = "mongo:";
+
+	@Override
+	public MongoTransactionOptions convert(Map options) {
+
+		validateKeys(options.keySet());
+		return SimpleMongoTransactionOptions.of(options);
+	}
+
+	@Nullable
+	@Override
+	public String getLabelPrefix() {
+		return PREFIX;
+	}
+
+	private static void validateKeys(Set keys) {
+
+		if (!SimpleMongoTransactionOptions.KNOWN_KEYS.containsAll(keys)) {
+
+			throw new IllegalArgumentException("Transaction labels contained invalid values. Has to be one of %s"
+					.formatted(SimpleMongoTransactionOptions.KNOWN_KEYS));
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
index 89e035ae69..f95a3c5310 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2010-2021 the original author or authors.
+ * Copyright 2010-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
index 855a59c55d..3fc3f82fbf 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013-2021 the original author or authors.
+ * Copyright 2013-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
index 3d85a33dcb..72b2794d05 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2021 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -20,8 +20,8 @@
 
 /**
  * Helper class featuring helper methods for working with MongoDb collections.
- * 

- *

+ *
+ *
* Mainly intended for internal use within the framework. * * @author Thomas Risberg diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java index 612c3eeb3e..1fcd5de516 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index ba8efa536c..f73f9fb7ed 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,7 +30,7 @@ * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. - *

+ *
* Note: Intended for internal usage only. * * @author Christoph Strobl @@ -43,7 +43,7 @@ public class MongoDatabaseUtils { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -56,7 +56,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -71,7 +71,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSyn /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -85,7 +85,7 @@ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFa /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -102,9 +102,10 @@ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFa private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { - Assert.notNull(factory, "Factory must not be null!"); + Assert.notNull(factory, "Factory must not be null"); - if (!TransactionSynchronizationManager.isSynchronizationActive()) { + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); } @@ -192,19 +193,11 @@ private static class MongoSessionSynchronization extends ResourceHolderSynchroni this.resourceHolder = resourceHolder; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion() - */ @Override protected boolean shouldReleaseBeforeCompletion() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object) - */ @Override protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) { @@ -213,10 +206,6 @@ protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) { } } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int) - */ @Override public void afterCompletion(int status) { @@ -227,10 +216,6 @@ public void afterCompletion(int status) { super.afterCompletion(status); } - /* - * (non-Javadoc) - * @see 
org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object) - */ @Override protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java deleted file mode 100644 index 9356a9e7d3..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2011-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.springframework.dao.DataAccessException; - -import com.mongodb.client.MongoDatabase; - -/** - * Interface for factories creating {@link MongoDatabase} instances. - * - * @author Mark Pollack - * @author Thomas Darimont - * @author Christoph Strobl - * @deprecated since 3.0, use {@link MongoDatabaseFactory} instead. - */ -@Deprecated -public interface MongoDbFactory extends MongoDatabaseFactory { - - /** - * Creates a default {@link MongoDatabase} instance. - * - * @return never {@literal null}. - * @throws DataAccessException - * @deprecated since 3.0. Use {@link #getMongoDatabase()} instead. 
- */ - @Deprecated - default MongoDatabase getDb() throws DataAccessException { - return getMongoDatabase(); - } - - /** - * Obtain a {@link MongoDatabase} instance to access the database with the given name. - * - * @param dbName must not be {@literal null} or empty. - * @return never {@literal null}. - * @throws DataAccessException - * @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead. - */ - @Deprecated - default MongoDatabase getDb(String dbName) throws DataAccessException { - return getMongoDatabase(dbName); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java index 541118b114..a087439d72 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -1,5 +1,5 @@ /* - * Copyright 2021 the original author or authors. + * Copyright 2021-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,7 +18,7 @@ /** * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when * passed on to the driver. - *

+ *
* A set of predefined {@link MongoExpression expressions}, including a * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method * like expressions (eg. {@code toUpper(name)}) are available via the diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java new file mode 100644 index 0000000000..39c4815d47 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java @@ -0,0 +1,81 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Arrays; +import java.util.function.Consumer; + +import org.springframework.data.domain.ManagedTypes; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public final class MongoManagedTypes implements ManagedTypes { + + private final ManagedTypes delegate; + + private MongoManagedTypes(ManagedTypes types) { + this.delegate = types; + } + + /** + * Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}. 
+ * + * @param managedTypes + * @return + */ + public static MongoManagedTypes from(ManagedTypes managedTypes) { + return new MongoManagedTypes(managedTypes); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}. + * + * @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}. + */ + public static MongoManagedTypes from(Class... types) { + return fromIterable(Arrays.asList(types)); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of + * {@link Class types}. + * + * @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be + * {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class + * types}. + */ + public static MongoManagedTypes fromIterable(Iterable> types) { + return from(ManagedTypes.fromIterable(types)); + } + + /** + * Factory method to return an empty {@link MongoManagedTypes} object. + * + * @return an empty {@link MongoManagedTypes} object. + */ + public static MongoManagedTypes empty() { + return from(ManagedTypes.empty()); + } + + @Override + public void forEach(Consumer> action) { + delegate.forEach(action); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java index 90a3b32023..a1e8344a9f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. 
+ * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,7 +24,7 @@ /** * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. * {@link MongoTransactionManager} binds instances of this class to the thread. - *

+ *
* Note: Intended for internal usage only. * * @author Christoph Strobl @@ -68,7 +68,7 @@ ClientSession getRequiredSession() { ClientSession session = getSession(); if (session == null) { - throw new IllegalStateException("No session available!"); + throw new IllegalStateException("No session available"); } return session; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java index 79a68e83ac..645b3508db 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java index b2a998548a..4215479f62 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java index 1e6013d73d..eda657f5f1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,18 +37,18 @@ /** * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}. - *

+ *
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread. - *

+ *
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() * commit} or {@link ClientSession#abortTransaction() abort} a transaction. - *

+ *
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. - *

+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override * {@link #doCommit(MongoTransactionObject)} to implement the * Retry Commit Operation @@ -64,52 +64,65 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager implements ResourceTransactionManager, InitializingBean { - private @Nullable MongoDatabaseFactory dbFactory; - private @Nullable TransactionOptions options; + private @Nullable MongoDatabaseFactory databaseFactory; + private MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; /** - * Create a new {@link MongoTransactionManager} for bean-style usage. - *

+ * Create a new {@link MongoTransactionManager} for bean-style usage.
* Note:The {@link MongoDatabaseFactory db factory} has to be - * {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a - * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

+ * {@link #setDatabaseFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a + * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * - * @see #setDbFactory(MongoDatabaseFactory) + * @see #setDatabaseFactory(MongoDatabaseFactory) * @see #setTransactionSynchronization(int) */ - public MongoTransactionManager() {} + public MongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } /** * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. */ - public MongoTransactionManager(MongoDatabaseFactory dbFactory) { - this(dbFactory, null); + public MongoTransactionManager(MongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); } /** * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. * @param options can be {@literal null}. */ - public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) { + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } - Assert.notNull(dbFactory, "DbFactory must not be null!"); + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. 
+ * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. + * @since 4.3 + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "MongoDatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); - this.dbFactory = dbFactory; - this.options = options; + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction() - */ @Override protected Object doGetTransaction() throws TransactionException { @@ -118,19 +131,11 @@ protected Object doGetTransaction() throws TransactionException { return new MongoTransactionObject(resourceHolder); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object) - */ @Override protected boolean isExistingTransaction(Object transaction) throws TransactionException { return extractMongoTransaction(transaction).hasResourceHolder(); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition) - */ @Override protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException { @@ -146,7 +151,8 @@ protected void doBegin(Object transaction, TransactionDefinition definition) thr } try { - mongoTransactionObject.startTransaction(options); + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition).mergeWith(options); + 
mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); } catch (MongoException ex) { throw new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", debugString(mongoTransactionObject.getSession())), ex); @@ -160,10 +166,6 @@ protected void doBegin(Object transaction, TransactionDefinition definition) thr TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object) - */ @Override protected Object doSuspend(Object transaction) throws TransactionException { @@ -173,19 +175,11 @@ protected Object doSuspend(Object transaction) throws TransactionException { return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object) - */ @Override protected void doResume(@Nullable Object transaction, Object suspendedResources) { TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus) - */ @Override protected final void doCommit(DefaultTransactionStatus status) throws TransactionException { @@ -212,8 +206,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio * By default those labels are ignored, nevertheless one might check for * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the * commit.
+ *

 	 * 
-	 *     
 	 * int retries = 3;
 	 * do {
 	 *     try {
@@ -226,8 +220,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio
 	 *     }
 	 *     Thread.sleep(500);
 	 * } while (--retries > 0);
-	 *     
*
+ *
* * @param transactionObject never {@literal null}. * @throws Exception in case of transaction errors. @@ -236,10 +230,6 @@ protected void doCommit(MongoTransactionObject transactionObject) throws Excepti transactionObject.commitTransaction(); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus) - */ @Override protected void doRollback(DefaultTransactionStatus status) throws TransactionException { @@ -259,10 +249,6 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc } } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus) - */ @Override protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { @@ -270,10 +256,6 @@ protected void doSetRollbackOnly(DefaultTransactionStatus status) throws Transac transactionObject.getRequiredResourceHolder().setRollbackOnly(); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object) - */ @Override protected void doCleanupAfterCompletion(Object transaction) { @@ -298,12 +280,12 @@ protected void doCleanupAfterCompletion(Object transaction) { /** * Set the {@link MongoDatabaseFactory} that this instance should manage transactions for. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. 
*/ - public void setDbFactory(MongoDatabaseFactory dbFactory) { + public void setDatabaseFactory(MongoDatabaseFactory databaseFactory) { - Assert.notNull(dbFactory, "DbFactory must not be null!"); - this.dbFactory = dbFactory; + Assert.notNull(databaseFactory, "DbFactory must not be null"); + this.databaseFactory = databaseFactory; } /** @@ -312,7 +294,7 @@ public void setDbFactory(MongoDatabaseFactory dbFactory) { * @param options can be {@literal null}. */ public void setOptions(@Nullable TransactionOptions options) { - this.options = options; + this.options = MongoTransactionOptions.of(options); } /** @@ -321,23 +303,15 @@ public void setOptions(@Nullable TransactionOptions options) { * @return can be {@literal null}. */ @Nullable - public MongoDatabaseFactory getDbFactory() { - return dbFactory; + public MongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory() - */ @Override public MongoDatabaseFactory getResourceFactory() { return getRequiredDbFactory(); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ @Override public void afterPropertiesSet() { getRequiredDbFactory(); @@ -354,14 +328,14 @@ private MongoResourceHolder newResourceHolder(TransactionDefinition definition, } /** - * @throws IllegalStateException if {@link #dbFactory} is {@literal null}. + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. */ private MongoDatabaseFactory getRequiredDbFactory() { - Assert.state(dbFactory != null, - "MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? 
It's required."); + Assert.state(databaseFactory != null, + "MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required"); - return dbFactory; + return databaseFactory; } private static MongoTransactionObject extractMongoTransaction(Object transaction) { @@ -397,7 +371,7 @@ private static String debugString(@Nullable ClientSession session) { debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); - debugString += String.format("closed = %d, ", session.getServerSession().isClosed()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); debugString += String.format("clusterTime = %s", session.getClusterTime()); } else { debugString += "id = n/a"; @@ -494,30 +468,22 @@ public ClientSession getSession() { private MongoResourceHolder getRequiredResourceHolder() { - Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. 
o_O"); + Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O"); return resourceHolder; } private ClientSession getRequiredSession() { ClientSession session = getSession(); - Assert.state(session != null, "A Session is required but it turned out to be null."); + Assert.state(session != null, "A Session is required but it turned out to be null"); return session; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly() - */ @Override public boolean isRollbackOnly() { return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#flush() - */ @Override public void flush() { TransactionSynchronizationUtils.triggerFlush(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java new file mode 100644 index 0000000000..e411bd5d2d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java @@ -0,0 +1,204 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.WriteConcernAware; +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; + +/** + * Options to be applied within a specific transaction scope. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptions + extends TransactionMetadata, ReadConcernAware, ReadPreferenceAware, WriteConcernAware { + + /** + * Value Object representing empty options enforcing client defaults. Returns {@literal null} for all getter methods. + */ + MongoTransactionOptions NONE = new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return null; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return null; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return null; + } + }; + + /** + * Merge current options with given ones. Will return first non {@literal null} value from getters whereas the + * {@literal this} has precedence over the given fallbackOptions. + * + * @param fallbackOptions can be {@literal null}. + * @return new instance of {@link MongoTransactionOptions} or this if {@literal fallbackOptions} is {@literal null} or + * {@link #NONE}. 
+ */ + default MongoTransactionOptions mergeWith(@Nullable MongoTransactionOptions fallbackOptions) { + + if (fallbackOptions == null || MongoTransactionOptions.NONE.equals(fallbackOptions)) { + return this; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return MongoTransactionOptions.this.hasMaxCommitTime() ? MongoTransactionOptions.this.getMaxCommitTime() + : fallbackOptions.getMaxCommitTime(); + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return MongoTransactionOptions.this.hasReadConcern() ? MongoTransactionOptions.this.getReadConcern() + : fallbackOptions.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return MongoTransactionOptions.this.hasReadPreference() ? MongoTransactionOptions.this.getReadPreference() + : fallbackOptions.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return MongoTransactionOptions.this.hasWriteConcern() ? MongoTransactionOptions.this.getWriteConcern() + : fallbackOptions.getWriteConcern(); + } + }; + } + + /** + * Apply the current options using the given mapping {@link Function} and return its result. + * + * @param mappingFunction + * @return result of the mapping function. + */ + default T map(Function mappingFunction) { + return mappingFunction.apply(this); + } + + /** + * @return MongoDB driver native {@link TransactionOptions}. 
+ * @see MongoTransactionOptions#map(Function) + */ + @Nullable + default TransactionOptions toDriverOptions() { + + return map(it -> { + + if (MongoTransactionOptions.NONE.equals(it)) { + return null; + } + + TransactionOptions.Builder builder = TransactionOptions.builder(); + if (it.hasMaxCommitTime()) { + builder.maxCommitTime(it.getMaxCommitTime().toMillis(), TimeUnit.MILLISECONDS); + } + if (it.hasReadConcern()) { + builder.readConcern(it.getReadConcern()); + } + if (it.hasReadPreference()) { + builder.readPreference(it.getReadPreference()); + } + if (it.hasWriteConcern()) { + builder.writeConcern(it.getWriteConcern()); + } + return builder.build(); + }); + } + + /** + * Factory method to wrap given MongoDB driver native {@link TransactionOptions} into {@link MongoTransactionOptions}. + * + * @param options + * @return {@link MongoTransactionOptions#NONE} if given object is {@literal null}. + */ + static MongoTransactionOptions of(@Nullable TransactionOptions options) { + + if (options == null) { + return NONE; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + + Long millis = options.getMaxCommitTime(TimeUnit.MILLISECONDS); + return millis != null ? 
Duration.ofMillis(millis) : null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return options.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return options.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return options.getWriteConcern(); + } + + @Nullable + @Override + public TransactionOptions toDriverOptions() { + return options; + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..b73b079a99 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A {@link TransactionOptionResolver} reading MongoDB specific {@link MongoTransactionOptions transaction options} from + * a {@link TransactionDefinition}. Implementations of {@link MongoTransactionOptions} may choose a specific + * {@link #getLabelPrefix() prefix} for {@link TransactionAttribute#getLabels() transaction attribute labels} to avoid + * evaluating non-store specific ones. + *

+ * {@link TransactionAttribute#getLabels()} evaluated by default should follow the property style using {@code =} to + * separate key and value pairs. + *

+ * By default {@link #resolve(TransactionDefinition)} will filter labels by the {@link #getLabelPrefix() prefix} and + * strip the prefix from the label before handing the pruned {@link Map} to the {@link #convert(Map)} function. + *

+ * A transaction definition with labels targeting MongoDB may look like the following: + *

+ * + * @Transactional(label = { "mongo:readConcern=majority" }) + * + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptionsResolver extends TransactionOptionResolver { + + /** + * Obtain the default {@link MongoTransactionOptionsResolver} implementation using a {@literal mongo:} + * {@link #getLabelPrefix() prefix}. + * + * @return instance of default {@link MongoTransactionOptionsResolver} implementation. + */ + static MongoTransactionOptionsResolver defaultResolver() { + return DefaultMongoTransactionOptionsResolver.INSTANCE; + } + + /** + * Get the prefix used to filter applicable {@link TransactionAttribute#getLabels() labels}. + * + * @return {@literal null} if no label defined. + */ + @Nullable + String getLabelPrefix(); + + /** + * Resolve {@link MongoTransactionOptions} from a given {@link TransactionDefinition} by evaluating + * {@link TransactionAttribute#getLabels()} labels if possible. + *

+ * Splits applicable labels property style using {@literal =} as deliminator and removes a potential + * {@link #getLabelPrefix() prefix} before calling {@link #convert(Map)} with filtered label values. + * + * @param definition + * @return {@link MongoTransactionOptions#NONE} in case the given {@link TransactionDefinition} is not a + * {@link TransactionAttribute} if no matching {@link TransactionAttribute#getLabels() labels} could be found. + * @throws IllegalArgumentException for options that do not map to valid transactions options or malformatted labels. + */ + @Override + default MongoTransactionOptions resolve(TransactionDefinition definition) { + + if (!(definition instanceof TransactionAttribute attribute)) { + return MongoTransactionOptions.NONE; + } + + if (attribute.getLabels().isEmpty()) { + return MongoTransactionOptions.NONE; + } + + Map attributeMap = attribute.getLabels().stream() + .filter(it -> !StringUtils.hasText(getLabelPrefix()) || it.startsWith(getLabelPrefix())) + .map(it -> StringUtils.hasText(getLabelPrefix()) ? it.substring(getLabelPrefix().length()) : it).map(it -> { + + String[] kvPair = StringUtils.split(it, "="); + Assert.isTrue(kvPair != null && kvPair.length == 2, + () -> "No value present for transaction option %s".formatted(kvPair != null ? kvPair[0] : it)); + return kvPair; + }) + + .collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())); + + return attributeMap.isEmpty() ? MongoTransactionOptions.NONE : convert(attributeMap); + } + + /** + * Convert the given {@link Map} into an instance of {@link MongoTransactionOptions}. + * + * @param options never {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException for invalid options. 
+ */ + MongoTransactionOptions convert(Map options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java index eba2aceb9f..f2a6714a95 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java index 711947a30d..f397818a4c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,7 +36,7 @@ * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} * suitable for transactional usage. - *

+ *
* Note: Intended for internal usage only. * * @author Mark Paluch @@ -75,7 +75,7 @@ public static Mono isTransactionActive(ReactiveMongoDatabaseFactory dat /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -88,7 +88,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -104,7 +104,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -119,7 +119,7 @@ public static Mono getDatabase(String dbName, ReactiveMongoDataba /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -136,7 +136,11 @@ public static Mono getDatabase(String dbName, ReactiveMongoDataba private static Mono doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { - Assert.notNull(factory, "DatabaseFactory must not be null!"); + Assert.notNull(factory, "DatabaseFactory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } return TransactionSynchronizationManager.forCurrentTransaction() .filter(TransactionSynchronizationManager::isSynchronizationActive) // @@ -210,19 +214,11 @@ private static class MongoSessionSynchronization this.resourceHolder = resourceHolder; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion() - */ @Override protected boolean shouldReleaseBeforeCompletion() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object) - */ @Override protected Mono processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) { @@ -233,10 +229,6 @@ protected Mono processResourceAfterCommit(ReactiveMongoResourceHolder reso return Mono.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int) - */ @Override public Mono afterCompletion(int status) { @@ -252,10 +244,6 @@ public Mono afterCompletion(int status) { }); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object) - */ @Override protected 
Mono releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java index b1f1c06d08..33caa5e7fe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,7 +24,7 @@ /** * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds * instances of this class to the subscriber context. - *

+ *
* Note: Intended for internal usage only. * * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java index 63706eff8a..2c65c26b79 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,22 +37,19 @@ /** * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single - * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}. - *

+ * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
* Binds a {@link ClientSession} from the specified * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber - * {@link reactor.util.context.Context}. - *

+ * {@link reactor.util.context.Context}.
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or - * {@link ClientSession#abortTransaction() abort} a transaction. - *

+ * {@link ClientSession#abortTransaction() abort} a transaction.
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. - *

+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the * Retry Commit Operation @@ -67,21 +64,23 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean { private @Nullable ReactiveMongoDatabaseFactory databaseFactory; - private @Nullable TransactionOptions options; + private @Nullable MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; /** - * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage. - *

+ * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
* Note:The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

+ *
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * * @see #setDatabaseFactory(ReactiveMongoDatabaseFactory) */ - public ReactiveMongoTransactionManager() {} + public ReactiveMongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } /** * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given @@ -103,17 +102,31 @@ public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFact */ public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. 
+ * @since 4.3 + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, + @Nullable MongoTransactionOptions defaultTransactionOptions) { - Assert.notNull(databaseFactory, "DatabaseFactory must not be null!"); + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); this.databaseFactory = databaseFactory; - this.options = options; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager) - */ @Override protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager) throws TransactionException { @@ -123,19 +136,11 @@ protected Object doGetTransaction(TransactionSynchronizationManager synchronizat return new ReactiveMongoTransactionObject(resourceHolder); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object) - */ @Override protected boolean isExistingTransaction(Object transaction) throws TransactionException { return extractMongoTransaction(transaction).hasResourceHolder(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition) - */ @Override protected Mono doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction, TransactionDefinition definition) throws TransactionException { @@ -158,7 +163,9 @@ protected Mono doBegin(TransactionSynchronizationManager 
synchronizationMa }).doOnNext(resourceHolder -> { - mongoTransactionObject.startTransaction(options); + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition) + .mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); if (logger.isDebugEnabled()) { logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); @@ -175,10 +182,6 @@ protected Mono doBegin(TransactionSynchronizationManager synchronizationMa }); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object) - */ @Override protected Mono doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction) throws TransactionException { @@ -192,10 +195,6 @@ protected Mono doSuspend(TransactionSynchronizationManager synchronizati }); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object) - */ @Override protected Mono doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction, Object suspendedResources) { @@ -203,10 +202,6 @@ protected Mono doResume(TransactionSynchronizationManager synchronizationM .fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources)); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction) - */ @Override protected final Mono doCommit(TransactionSynchronizationManager synchronizationManager, GenericReactiveTransaction 
status) throws TransactionException { @@ -243,10 +238,6 @@ protected Mono doCommit(TransactionSynchronizationManager synchronizationM return transactionObject.commitTransaction(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction) - */ @Override protected Mono doRollback(TransactionSynchronizationManager synchronizationManager, GenericReactiveTransaction status) { @@ -268,10 +259,6 @@ protected Mono doRollback(TransactionSynchronizationManager synchronizatio }); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction) - */ @Override protected Mono doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager, GenericReactiveTransaction status) throws TransactionException { @@ -282,10 +269,6 @@ protected Mono doSetRollbackOnly(TransactionSynchronizationManager synchro }); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object) - */ @Override protected Mono doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager, Object transaction) { @@ -317,7 +300,7 @@ protected Mono doCleanupAfterCompletion(TransactionSynchronizationManager */ public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) { - Assert.notNull(databaseFactory, "DatabaseFactory must not be null!"); + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); this.databaseFactory = databaseFactory; } @@ -327,7 +310,7 @@ public void 
setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) { * @param options can be {@literal null}. */ public void setOptions(@Nullable TransactionOptions options) { - this.options = options; + this.options = MongoTransactionOptions.of(options); } /** @@ -340,10 +323,6 @@ public ReactiveMongoDatabaseFactory getDatabaseFactory() { return databaseFactory; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ @Override public void afterPropertiesSet() { getRequiredDatabaseFactory(); @@ -363,7 +342,7 @@ private Mono newResourceHolder(TransactionDefinitio private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() { Assert.state(databaseFactory != null, - "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required."); + "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required"); return databaseFactory; } @@ -401,7 +380,7 @@ private static String debugString(@Nullable ClientSession session) { debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); - debugString += String.format("closed = %d, ", session.getServerSession().isClosed()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); debugString += String.format("clusterTime = %s", session.getClusterTime()); } else { debugString += "id = n/a"; @@ -498,30 +477,22 @@ public ClientSession getSession() { private ReactiveMongoResourceHolder getRequiredResourceHolder() { - Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. 
o_O"); + Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O"); return resourceHolder; } private ClientSession getRequiredSession() { ClientSession session = getSession(); - Assert.state(session != null, "A Session is required but it turned out to be null."); + Assert.state(session != null, "A Session is required but it turned out to be null"); return session; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly() - */ @Override public boolean isRollbackOnly() { return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#flush() - */ @Override public void flush() { throw new UnsupportedOperationException("flush() not supported"); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java index da48f22154..93dbf5db69 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,7 +35,7 @@ /** * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. - *

+ *
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself * like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them * if not already proxied. @@ -76,13 +76,13 @@ public SessionAwareMethodInterceptor(ClientSession session, T target, Class< Class databaseType, ClientSessionOperator databaseDecorator, Class collectionType, ClientSessionOperator collectionDecorator) { - Assert.notNull(session, "ClientSession must not be null!"); - Assert.notNull(target, "Target must not be null!"); - Assert.notNull(sessionType, "SessionType must not be null!"); - Assert.notNull(databaseType, "Database type must not be null!"); - Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!"); - Assert.notNull(collectionType, "Collection type must not be null!"); - Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!"); + Assert.notNull(session, "ClientSession must not be null"); + Assert.notNull(target, "Target must not be null"); + Assert.notNull(sessionType, "SessionType must not be null"); + Assert.notNull(databaseType, "Database type must not be null"); + Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null"); + Assert.notNull(collectionType, "Collection type must not be null"); + Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null"); this.session = session; this.target = target; @@ -95,10 +95,6 @@ public SessionAwareMethodInterceptor(ClientSession session, T target, Class< this.sessionType = sessionType; } - /* - * (non-Javadoc) - * @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation) - */ @Nullable @Override public Object invoke(MethodInvocation methodInvocation) throws Throwable { @@ -139,12 +135,8 @@ protected Object decorate(Object target) { private static boolean requiresSession(Method method) { - 
if (method.getParameterCount() == 0 - || !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0])) { - return true; - } - - return false; + return method.getParameterCount() == 0 + || !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0]); } private static Object[] prependSessionToArguments(ClientSession session, MethodInvocation invocation) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java index 2223b82391..07b5c31586 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,13 +15,20 @@ */ package org.springframework.data.mongodb; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + /** - * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to - * define in which type of transactions to participate if any. + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. 
* * @author Christoph Strobl * @author Mark Paluch * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) */ public enum SessionSynchronization { @@ -34,5 +41,12 @@ public enum SessionSynchronization { /** * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. */ - ON_ACTUAL_TRANSACTION; + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. + * + * @since 3.2.5 + */ + NEVER } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java new file mode 100644 index 0000000000..b52fc0bd71 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java @@ -0,0 +1,154 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.Function; +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Trivial implementation of {@link MongoTransactionOptions}. + * + * @author Christoph Strobl + * @since 4.3 + */ +class SimpleMongoTransactionOptions implements MongoTransactionOptions { + + static final Set KNOWN_KEYS = Arrays.stream(OptionKey.values()).map(OptionKey::getKey) + .collect(Collectors.toSet()); + + private final Duration maxCommitTime; + private final ReadConcern readConcern; + private final ReadPreference readPreference; + private final WriteConcern writeConcern; + + static SimpleMongoTransactionOptions of(Map options) { + return new SimpleMongoTransactionOptions(options); + } + + private SimpleMongoTransactionOptions(Map options) { + + this.maxCommitTime = doGetMaxCommitTime(options); + this.readConcern = doGetReadConcern(options); + this.readPreference = doGetReadPreference(options); + this.writeConcern = doGetWriteConcern(options); + } + + @Nullable + @Override + public Duration getMaxCommitTime() { + return maxCommitTime; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return readConcern; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return readPreference; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return writeConcern; + } + + @Override + public String toString() { + + return "DefaultMongoTransactionOptions{" + "maxCommitTime=" + maxCommitTime + ", readConcern=" + readConcern + + ", readPreference=" + readPreference + ", writeConcern=" + writeConcern + '}'; + } + + @Nullable + private static Duration 
doGetMaxCommitTime(Map options) { + + return getValue(options, OptionKey.MAX_COMMIT_TIME, value -> { + + Duration timeout = Duration.parse(value); + Assert.isTrue(!timeout.isNegative(), "%s cannot be negative".formatted(OptionKey.MAX_COMMIT_TIME)); + return timeout; + }); + } + + @Nullable + private static ReadConcern doGetReadConcern(Map options) { + return getValue(options, OptionKey.READ_CONCERN, value -> new ReadConcern(ReadConcernLevel.fromString(value))); + } + + @Nullable + private static ReadPreference doGetReadPreference(Map options) { + return getValue(options, OptionKey.READ_PREFERENCE, ReadPreference::valueOf); + } + + @Nullable + private static WriteConcern doGetWriteConcern(Map options) { + + return getValue(options, OptionKey.WRITE_CONCERN, value -> { + + WriteConcern writeConcern = WriteConcern.valueOf(value); + if (writeConcern == null) { + throw new IllegalArgumentException("'%s' is not a valid WriteConcern".formatted(options.get("writeConcern"))); + } + return writeConcern; + }); + } + + @Nullable + private static T getValue(Map options, OptionKey key, Function convertFunction) { + + String value = options.get(key.getKey()); + return value != null ? 
convertFunction.apply(value) : null; + } + + enum OptionKey { + + MAX_COMMIT_TIME("maxCommitTime"), READ_CONCERN("readConcern"), READ_PREFERENCE("readPreference"), WRITE_CONCERN( + "writeConcern"); + + final String key; + + OptionKey(String key) { + this.key = key; + } + + public String getKey() { + return key; + } + + @Override + public String toString() { + return getKey(); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java index dbbf146fc1..a3d600270f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.data.util.Version; import org.springframework.util.StringUtils; @@ -31,7 +31,7 @@ */ public class SpringDataMongoDB { - private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class); + private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class); private static final Version FALLBACK_VERSION = new Version(3); private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation @@ -48,7 +48,7 @@ public static MongoDriverInformation driverInformation() { /** * Fetches the "Implementation-Version" manifest attribute from the jar file. - *

+ *
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the * version in all environments. In this case the current Major version is returned as a fallback. * @@ -68,7 +68,7 @@ public static Version version() { try { return Version.parse(versionString); } catch (Exception e) { - LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString); + LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString)); } return FALLBACK_VERSION; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java new file mode 100644 index 0000000000..cd5f58d5b1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; + +import org.springframework.lang.Nullable; + +/** + * MongoDB-specific transaction metadata. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface TransactionMetadata { + + /** + * @return the maximum commit time. Can be {@literal null} if not configured. 
+ */ + @Nullable + Duration getMaxCommitTime(); + + /** + * @return {@literal true} if the max commit time is configured; {@literal false} otherwise. + */ + default boolean hasMaxCommitTime() { + return getMaxCommitTime() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java new file mode 100644 index 0000000000..37c7e3686b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; + +/** + * Interface that defines a resolver for {@link TransactionMetadata} based on a {@link TransactionDefinition}. + * Transaction metadata is used to enrich the MongoDB transaction with additional information. + * + * @author Christoph Strobl + * @since 4.3 + */ +interface TransactionOptionResolver { + + /** + * Resolves the transaction metadata from a given {@link TransactionDefinition}. + * + * @param definition the {@link TransactionDefinition}. + * @return the resolved {@link TransactionMetadata} or {@literal null} if the resolver cannot resolve any metadata. 
+ */ + @Nullable + T resolve(TransactionDefinition definition); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java new file mode 100644 index 0000000000..5446170ff9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * {@link TransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data + * access failures such as reading data using an already closed session. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientClientSessionException extends TransientMongoDbException { + + /** + * Constructor for {@link TransientClientSessionException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientClientSessionException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java new file mode 100644 index 0000000000..cad05ca17c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java @@ -0,0 +1,39 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * Root of the hierarchy of MongoDB specific data access exceptions that are considered transient such as + * {@link com.mongodb.MongoException MongoExceptions} carrying {@link com.mongodb.MongoException#hasErrorLabel(String) + * specific labels}. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientMongoDbException extends TransientDataAccessException { + + /** + * Constructor for {@link TransientMongoDbException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientMongoDbException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java index 1148227786..bec05d0d68 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,13 @@ package org.springframework.data.mongodb; import org.springframework.dao.UncategorizedDataAccessException; +import org.springframework.lang.Nullable; public class UncategorizedMongoDbException extends UncategorizedDataAccessException { private static final long serialVersionUID = -2336595514062364929L; - public UncategorizedMongoDbException(String msg, Throwable cause) { + public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) { super(msg, cause); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java new file mode 100644 index 0000000000..2254b3c9a8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.aot.hint.TypeReference; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.core.annotation.MergedAnnotations; +import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public class LazyLoadingProxyAotProcessor { + + private boolean generalLazyLoadingProxyContributed = false; + + public void registerLazyLoadingProxyIfNeeded(Class type, GenerationContext generationContext) { + + Set refFields = getFieldsWithAnnotationPresent(type, Reference.class); + if (refFields.isEmpty()) { + return; + } + + refFields.stream() // + .filter(LazyLoadingProxyAotProcessor::isLazyLoading) // + .forEach(field -> { + + if (!generalLazyLoadingProxyContributed) { + generationContext.getRuntimeHints().proxies().registerJdkProxy( + TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class), + 
TypeReference.of(org.springframework.aop.SpringProxy.class), + TypeReference.of(org.springframework.aop.framework.Advised.class), + TypeReference.of(org.springframework.core.DecoratingProxy.class)); + generalLazyLoadingProxyContributed = true; + } + + if (field.getType().isInterface()) { + + List> interfaces = new ArrayList<>( + Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces())); + interfaces.add(org.springframework.aop.SpringProxy.class); + interfaces.add(org.springframework.aop.framework.Advised.class); + interfaces.add(org.springframework.core.DecoratingProxy.class); + + generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new)); + } else { + + Class proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(), + LazyLoadingInterceptor::none); + + // see: spring-projects/spring-framework/issues/29309 + generationContext.getRuntimeHints().reflection().registerType(proxyClass, MongoAotReflectionHelper::cglibProxyReflectionMemberAccess); + } + }); + } + + private static boolean isLazyLoading(Field field) { + if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy(); + } + if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy(); + } + return false; + } + + private static Set getFieldsWithAnnotationPresent(Class type, Class annotation) { + + Set fields = new LinkedHashSet<>(); + for (Field field : type.getDeclaredFields()) { + if (MergedAnnotations.from(field).get(annotation).isPresent()) { + fields.add(field); + } + } + return fields; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java new file mode 100644 index 
0000000000..2fe27a2c9e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java @@ -0,0 +1,68 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.util.function.Predicate; + +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary; +import org.springframework.data.util.TypeUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * Collection of {@link Predicate predicates} to determine dynamic library aspects during AOT computation. Intended for + * internal usage only. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +public class MongoAotPredicates { + + public static final Predicate> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) + || TypeUtils.type(type).isPartOf("org.bson"); + public static final Predicate IS_REACTIVE_LIBARARY_AVAILABLE = ReactiveWrappers::isAvailable; + public static final Predicate IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.client.MongoClient", classLoader); + public static final Predicate IS_REACTIVE_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.reactivestreams.client.MongoClient", classLoader); + + /** + * @return {@literal true} if the Project Reactor is present. + */ + public static boolean isReactorPresent() { + return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.client.MongoClient} is present. + * @since 4.0 + */ + public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) { + return IS_SYNC_CLIENT_PRESENT.test(classLoader); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.reactivestreams.client.MongoClient} is present. + * @since 4.3 + */ + public static boolean isReactiveClientPresent(@Nullable ClassLoader classLoader) { + return IS_REACTIVE_CLIENT_PRESENT.test(classLoader); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java new file mode 100644 index 0000000000..ff8d04b382 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java @@ -0,0 +1,31 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.TypeHint.Builder; + +/** + * @author Christoph Strobl + */ +public final class MongoAotReflectionHelper { + + public static void cglibProxyReflectionMemberAccess(Builder builder) { + + builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, + MemberCategory.DECLARED_FIELDS); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java new file mode 100644 index 0000000000..a33f20ffb6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.core.ResolvableType; +import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @since 2022/06 + */ +class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor { + + private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor(); + + public MongoManagedTypesBeanRegistrationAotProcessor() { + setModuleIdentifier("mongo"); + } + + @Override + protected boolean isMatch(@Nullable Class beanType, @Nullable String beanName) { + return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName); + } + + protected boolean isMongoManagedTypes(@Nullable Class beanType) { + return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType); + } + + @Override + protected void contributeType(ResolvableType type, GenerationContext generationContext) { + + if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) { + return; + } + + super.contributeType(type, generationContext); + lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java new file mode 100644 index 0000000000..538fe4e812 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java @@ -0,0 +1,129 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.*; + +import java.util.Arrays; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import 
org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * {@link RuntimeHintsRegistrar} for repository types and entity callbacks. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.0 + */ +class MongoRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class), + TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + registerTransactionProxyHints(hints, classLoader); + registerMongoCompatibilityAdapterHints(hints, classLoader); + + if (isReactorPresent()) { + + hints.reflection() + .registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class), + TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class), + TypeReference.of(ReactiveAfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + } + + private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (MongoAotPredicates.isSyncClientPresent(classLoader) + && ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) { + + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"), + 
TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + } + } + + @SuppressWarnings("deprecation") + private static void registerMongoCompatibilityAdapterHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection() // + .registerType(MongoClientSettings.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MongoClientSettings.Builder.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(IndexOptions.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(ServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(UnixServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) // + .registerType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.build.MongoDriverVersion"), MemberCategory.PUBLIC_FIELDS); + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + + hints.reflection() // + .registerType(MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReduceIterable.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + + if (MongoAotPredicates.isReactiveClientPresent(classLoader)) { + + hints.reflection() // + .registerType(com.mongodb.reactivestreams.client.MongoDatabase.class, 
MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReducePublisher.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java index 96df484aad..93033417fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,9 +25,7 @@ import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; -import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.lang.Nullable; import com.mongodb.MongoClientSettings; import com.mongodb.MongoClientSettings.Builder; @@ -80,30 +78,12 @@ public MongoDatabaseFactory mongoDbFactory() { return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName()); } - /** - * Return the base package to scan for mapped {@link Document}s. 
Will return the package name of the configuration - * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending - * {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is - * overridden to implement alternate behavior. - * - * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for - * entities. - * @deprecated use {@link #getMappingBasePackages()} instead. - */ - @Deprecated - @Nullable - protected String getMappingBasePackage() { - - Package mappingBasePackage = getClass().getPackage(); - return mappingBasePackage == null ? null : mappingBasePackage.getName(); - } - /** * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and - * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied. + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. * * @see #customConversions() - * @see #mongoMappingContext(MongoCustomConversions) + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) * @see #mongoDbFactory() */ @Bean diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java index a2a1f74b08..f93c4ae708 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -84,10 +84,10 @@ public ReactiveMongoDatabaseFactory reactiveMongoDbFactory() { /** * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and - * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied. + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. * * @see #customConversions() - * @see #mongoMappingContext(MongoCustomConversions) + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) * @see #reactiveMongoDbFactory() * @return never {@literal null}. */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java index dd32d9bb70..584fbfba30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java index 2141063afc..b070a0190f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,10 +30,6 @@ */ public class ConnectionStringPropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String connectionString) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java index 022ddb3036..d6ce19f3ee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java index c67d2cd833..21fadf86c6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java index dcfe70b367..3b10019cc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java index d6653e8999..b86da91dad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,10 +34,6 @@ */ class GridFsTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -46,10 +42,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 0e825afe1a..164b4defb6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,13 +18,10 @@ import static org.springframework.data.mongodb.config.BeanNames.*; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; @@ -62,6 +59,7 @@ import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -76,11 +74,12 @@ * @author Christoph Strobl * @author Mark Paluch * @author Zied Yaich + * @author Tomasz Forys */ public class MappingMongoConverterParser implements BeanDefinitionParser { private static final String BASE_PACKAGE = "base-package"; - private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator", + private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator", MappingMongoConverterParser.class.getClassLoader()); /* (non-Javadoc) @@ -96,13 +95,12 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE); id = StringUtils.hasText(id) ? 
id : DEFAULT_CONVERTER_BEAN_NAME; - String autoIndexCreation = element.getAttribute("auto-index-creation"); - boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation); + boolean autoIndexCreationEnabled = isAutoIndexCreationEnabled(element); parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element)); BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext); - String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id); + String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id, autoIndexCreationEnabled); // Need a reference to a Mongo instance String dbFactoryRef = element.getAttribute("db-factory-ref"); @@ -135,9 +133,7 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); } - try { - registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); - } catch (NoSuchBeanDefinitionException ignored) { + if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) { BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoPersistentEntityIndexCreator.class); @@ -151,7 +147,7 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext); - if (validatingMongoEventListener != null) { + if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) { parserContext.registerBeanComponent( new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME)); } @@ -165,15 +161,16 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { private BeanDefinition 
potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) { String disableValidation = element.getAttribute("disable-validation"); - boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation); + boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation); if (!validationDisabled) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); - RuntimeBeanReference validator = getValidator(builder, parserContext); + RuntimeBeanReference validator = getValidator(element, parserContext); if (validator != null) { builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class); + builder.getRawBeanDefinition().setSource(element); builder.addConstructorArgValue(validator); return builder.getBeanDefinition(); @@ -195,16 +192,37 @@ private RuntimeBeanReference getValidator(Object source, ParserContext parserCon validatorDef.setSource(source); validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef); - parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName)); return new RuntimeBeanReference(validatorName); } + private static boolean isAutoIndexCreationEnabled(Element element) { + + String autoIndexCreation = element.getAttribute("auto-index-creation"); + return StringUtils.hasText(autoIndexCreation) && Boolean.parseBoolean(autoIndexCreation); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribuite}. + * + * @return the mapping context bean name. + * @deprecated since 4.3. Use + * {@link #potentiallyCreateMappingContext(Element, ParserContext, BeanDefinition, String, boolean)} + * instead. 
+ */ + @Deprecated(since = "4.3", forRemoval = true) public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) { return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false); } + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribuite}. + * + * @return the mapping context bean name. + */ public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) { @@ -255,7 +273,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont && Boolean.parseBoolean(abbreviateFieldNames); if (fieldNamingStrategyReferenced && abbreviationActivated) { - context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!", + context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured", element); return; } @@ -284,7 +302,7 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse ManagedList converterBeans = new ManagedList<>(); List converterElements = DomUtils.getChildElementsByTagName(customerConvertersElement, "converter"); - if (converterElements != null) { + if (!ObjectUtils.isEmpty(converterElements)) { for (Element listenerElement : converterElements) { converterBeans.add(parseConverter(listenerElement, parserContext)); } @@ -373,13 +391,9 @@ public NegatingFilter(TypeFilter... 
filters) { Assert.notNull(filters, "TypeFilters must not be null"); - this.delegates = new HashSet<>(Arrays.asList(filters)); + this.delegates = Set.of(filters); } - /* - * (non-Javadoc) - * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory) - */ public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) throws IOException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java index 241080afb1..4e05fe6c39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2012-2021 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,28 +47,16 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono", MongoAuditingRegistrar.class.getClassLoader()); - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ @Override protected Class getBeanClass(Element element) { return AuditingEntityCallback.class; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId() - */ @Override protected boolean shouldGenerateId() { return true; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java index ff73015899..37e509a38a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,11 +18,10 @@ import java.lang.annotation.Annotation; import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.core.type.AnnotationMetadata; +import org.springframework.core.Ordered; import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; import org.springframework.data.auditing.config.AuditingConfiguration; @@ -36,68 +35,42 @@ * @author Thomas Darimont * @author Oliver Gierke * @author Mark Paluch + * @author Christoph Strobl */ -class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { +class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered { - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableMongoAuditing.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ @Override protected String getAuditingHandlerBeanName() { return "mongoAuditingHandler"; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override - public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) { - - Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!"); - Assert.notNull(registry, 
"BeanDefinitionRegistry must not be null!"); + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { - super.registerBeanDefinitions(annotationMetadata, registry); + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ @Override protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class); + Assert.notNull(configuration, "AuditingConfiguration must not be null"); - BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class); - definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR); - - builder.addConstructorArgValue(definition.getBeanDefinition()); - return configureDefaultAuditHandlerAttributes(configuration, builder); + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) { - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + 
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder .rootBeanDefinition(AuditingEntityCallback.class); @@ -108,4 +81,8 @@ protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandle AuditingEntityCallback.class.getName(), registry); } + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java index 91b89593c2..501c00b9d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,10 +35,6 @@ */ public class MongoClientParser implements BeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ public BeanDefinition parse(Element element, ParserContext parserContext) { Object source = parserContext.extractSource(element); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java index 52ec72d171..0594f6176c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,6 +30,7 @@ import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; import org.springframework.data.mapping.model.FieldNamingStrategy; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoManagedTypes; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; import org.springframework.data.mongodb.core.mapping.Document; @@ -76,14 +77,13 @@ protected Collection getMappingBasePackages() { * * @see #getMappingBasePackages() * @return - * @throws ClassNotFoundException */ @Bean - public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions) - throws ClassNotFoundException { + public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions, + MongoManagedTypes mongoManagedTypes) { MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); + mappingContext.setManagedTypes(mongoManagedTypes); mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); mappingContext.setAutoIndexCreation(autoIndexCreation()); @@ -91,6 +91,16 @@ public MongoMappingContext mongoMappingContext(MongoCustomConversions customConv return mappingContext; } + /** + * @return new instance of {@link MongoManagedTypes}. + * @throws ClassNotFoundException + * @since 4.0 + */ + @Bean + public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException { + return MongoManagedTypes.fromIterable(getInitialEntitySet()); + } + /** * Register custom {@link Converter}s in a {@link CustomConversions} object if required. 
These * {@link CustomConversions} will be registered with the @@ -172,8 +182,7 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound /** * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. * * @return */ @@ -197,7 +206,7 @@ protected FieldNamingStrategy fieldNamingStrategy() { * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not. * * @return {@literal false} by default.
- * INFO: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}. + * INFO: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}. * @since 2.2 */ protected boolean autoIndexCreation() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java index 5991a683ef..b8f23a35af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,9 +16,9 @@ package org.springframework.data.mongodb.config; import java.beans.PropertyEditorSupport; -import java.io.UnsupportedEncodingException; import java.lang.reflect.Method; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -51,10 +51,6 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { private static final String OPTIONS_DELIMITER = "?"; private static final String OPTION_VALUE_DELIMITER = "&"; - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String text) throws IllegalArgumentException { @@ -121,7 +117,7 @@ public void setAsText(@Nullable String text) throws IllegalArgumentException { userNameAndPassword[1].toCharArray())); } else { throw new IllegalArgumentException( - String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism)); + String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism)); } } } else { @@ -198,7 +194,7 @@ private static Properties extractOptions(String text) { String[] optionArgs = option.split("="); if (optionArgs.length == 1) { - throw new IllegalArgumentException(String.format("Query parameter '%s' has no value!", optionArgs[0])); + throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0])); } properties.put(optionArgs[0], optionArgs[1]); @@ -213,29 +209,25 @@ private static void verifyUsernameAndPasswordPresent(String[] source) { if (source.length != 2) { throw new IllegalArgumentException( - "Credentials need to specify username and password like in 'username:password@database'!"); + "Credentials need to specify username and password like in 'username:password@database'"); } } private static void verifyDatabasePresent(String source) { if (!StringUtils.hasText(source)) { - throw new 
IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!"); + throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'"); } } private static void verifyUserNamePresent(String[] source) { if (source.length == 0 || !StringUtils.hasText(source[0])) { - throw new IllegalArgumentException("Credentials need to specify username!"); + throw new IllegalArgumentException("Credentials need to specify username"); } } private static String decodeParameter(String it) { - try { - return URLDecoder.decode(it, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("o_O UTF-8 not supported!", e); - } + return URLDecoder.decode(it, StandardCharsets.UTF_8); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java index d4e1c65358..2e733cc79f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,8 +18,6 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.MongoParsingUtils.*; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; import org.springframework.beans.factory.BeanDefinitionStoreException; @@ -51,21 +49,8 @@ */ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser { - private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES; + private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Set.of("id", "write-concern"); - static { - - Set mongoUriAllowedAdditionalAttributes = new HashSet(); - mongoUriAllowedAdditionalAttributes.add("id"); - mongoUriAllowedAdditionalAttributes.add("write-concern"); - - MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -74,10 +59,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.DB_FACTORY_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { @@ -171,7 +152,7 @@ private BeanDefinition getConnectionString(Element element, ParserContext parser if (element.getAttributes().getLength() > allowedAttributesCount) { - parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!", + parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually", parserContext.extractSource(element)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java index 8486f28864..af1ffbbb02 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,7 +32,9 @@ * @author John Brisbin * @author Oliver Gierke * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) public class MongoJmxParser implements BeanDefinitionParser { public BeanDefinition parse(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java index 5bdb199a50..47519ca615 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,10 +26,6 @@ */ public class MongoNamespaceHandler extends NamespaceHandlerSupport { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.NamespaceHandler#init() - */ public void init() { registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index cd4d16d91b..95b56b58f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,9 +22,12 @@ import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.CustomEditorConfigurer; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -37,7 +40,6 @@ * @author Christoph Strobl * @author Mark Paluch */ -@SuppressWarnings("deprecation") abstract class MongoParsingUtils { private MongoParsingUtils() {} @@ -112,6 +114,20 @@ public static boolean parseMongoClientSettings(Element element, BeanDefinitionBu // Field level encryption setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + // and the rest mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java index dc02a1eee4..1e1b11356f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,10 +39,6 @@ */ class MongoTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -51,10 +47,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.MONGO_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java index ba382a32cc..e46701a7f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,7 +28,7 @@ * @author Christoph Strobl * @since 3.1 */ -class PersistentEntitiesFactoryBean implements FactoryBean { +public class PersistentEntitiesFactoryBean implements FactoryBean { private final MappingMongoConverter converter; @@ -41,19 +41,11 @@ public PersistentEntitiesFactoryBean(MappingMongoConverter converter) { this.converter = converter; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ @Override public PersistentEntities getObject() { return PersistentEntities.of(converter.getMappingContext()); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ @Override public Class getObjectType() { return PersistentEntities.class; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java index 01123cb7d6..80cf404434 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,11 +18,9 @@ import java.lang.annotation.Annotation; import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.core.type.AnnotationMetadata; import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; import org.springframework.data.auditing.config.AuditingConfiguration; @@ -34,56 +32,42 @@ * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation. * * @author Mark Paluch + * @author Christoph Strobl * @since 3.1 */ class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableReactiveMongoAuditing.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ @Override protected String getAuditingHandlerBeanName() { return "reactiveMongoAuditingHandler"; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ @Override - protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) 
{ + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); + } - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class); + @Override + protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class); - definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR); + Assert.notNull(configuration, "AuditingConfiguration must not be null"); - builder.addConstructorArgValue(definition.getBeanDefinition()); - return configureDefaultAuditHandlerAttributes(configuration, builder); + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) { - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java index c7b6479a24..60bf126ae7 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,10 +32,6 @@ */ public class ReadConcernPropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override public void setAsText(@Nullable String readConcernString) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java index acf952069f..5ed9b66619 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,10 +29,6 @@ */ public class ReadPreferencePropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java index 5e5d82f6be..9c51900902 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,8 +21,8 @@ import java.util.HashSet; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -43,13 +43,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport { * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :. */ private static final String HOST_PORT_SPLIT_PATTERN = "(? 2) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source)); + } return null; } @@ -105,9 +105,13 @@ private ServerAddress parseServerAddress(String source) { return port == null ? 
new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port); } catch (UnknownHostException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0])); + } } catch (NumberFormatException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1])); + } } return null; @@ -121,7 +125,7 @@ private ServerAddress parseServerAddress(String source) { */ private String[] extractHostAddressAndPort(String addressAndPortSource) { - Assert.notNull(addressAndPortSource, "Address and port source must not be null!"); + Assert.notNull(addressAndPortSource, "Address and port source must not be null"); String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN); String hostAddress = hostAndPort[0]; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java index 359e165061..9f579b8fe9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java @@ -1,5 +1,5 @@ /* - * Copyright 2012-2021 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,10 +26,6 @@ */ public class StringToWriteConcernConverter implements Converter { - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ public WriteConcern convert(String source) { WriteConcern writeConcern = WriteConcern.valueOf(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java index 5e9e6b39ee..b777969967 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,10 +29,6 @@ */ public class UUidRepresentationPropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String value) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java index 7bb77d8d8c..ee0d09e555 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java index 605fa60663..a00d95a9ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,32 +15,22 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; -import java.util.Collections; import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; import org.bson.Document; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.aggregation.Aggregation; -import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; -import org.springframework.data.mongodb.core.aggregation.AggregationOptions; import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping; -import org.springframework.data.mongodb.core.aggregation.CountOperation; -import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import 
org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.query.CriteriaDefinition; -import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; -import org.springframework.util.Assert; -import org.springframework.util.ObjectUtils; /** * Utility methods to map {@link org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and @@ -52,17 +42,17 @@ */ class AggregationUtil { - QueryMapper queryMapper; - MappingContext, MongoPersistentProperty> mappingContext; - Lazy untypedMappingContext; + final QueryMapper queryMapper; + final MappingContext, MongoPersistentProperty> mappingContext; + final Lazy untypedMappingContext; AggregationUtil(QueryMapper queryMapper, MappingContext, MongoPersistentProperty> mappingContext) { this.queryMapper = queryMapper; this.mappingContext = mappingContext; - this.untypedMappingContext = Lazy - .of(() -> new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper)); + this.untypedMappingContext = Lazy.of(() -> new TypeBasedAggregationOperationContext(Object.class, mappingContext, + queryMapper, FieldLookupPolicy.relaxed())); } AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class inputType) { @@ -73,27 +63,18 @@ AggregationOperationContext createAggregationContext(Aggregation aggregation, @N return Aggregation.DEFAULT_CONTEXT; } - if (!(aggregation instanceof TypedAggregation)) { - - if(inputType == null) { - return untypedMappingContext.get(); - } + FieldLookupPolicy lookupPolicy = domainTypeMapping == DomainTypeMapping.STRICT + && !aggregation.getPipeline().containsUnionWith() ? 
FieldLookupPolicy.strict() : FieldLookupPolicy.relaxed(); - if (domainTypeMapping == DomainTypeMapping.STRICT - && !aggregation.getPipeline().containsUnionWith()) { - return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper); - } - - return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper); + if (aggregation instanceof TypedAggregation ta) { + return new TypeBasedAggregationOperationContext(ta.getInputType(), mappingContext, queryMapper, lookupPolicy); } - inputType = ((TypedAggregation) aggregation).getInputType(); - if (domainTypeMapping == DomainTypeMapping.STRICT - && !aggregation.getPipeline().containsUnionWith()) { - return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper); + if (inputType == null) { + return untypedMappingContext.get(); } - return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper); + return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper, lookupPolicy); } /** @@ -104,12 +85,7 @@ AggregationOperationContext createAggregationContext(Aggregation aggregation, @N * @return */ List createPipeline(Aggregation aggregation, AggregationOperationContext context) { - - if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) { - return aggregation.toPipeline(context); - } - - return mapAggregationPipeline(aggregation.toPipeline(context)); + return aggregation.toPipeline(context); } /** @@ -120,21 +96,7 @@ List createPipeline(Aggregation aggregation, AggregationOperationConte * @return */ Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) { - - Document command = aggregation.toDocument(collection, context); - - if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) { - return command; - } - - command.put("pipeline", mapAggregationPipeline(command.get("pipeline", List.class))); - - return command; + return 
aggregation.toDocument(collection, context); } - private List mapAggregationPipeline(List pipeline) { - - return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty())) - .collect(Collectors.toList()); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java index f214ff3d4b..4820c2355c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,18 +17,33 @@ import java.util.List; +import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; import com.mongodb.bulk.BulkWriteResult; /** - * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB - * 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add - * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. 
This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling * {@link #execute()}. * + *

+ * MongoOperations ops = …;
+ *
+ * ops.bulkOps(BulkMode.UNORDERED, Person.class)
+ * 				.insert(newPerson)
+ * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
+ * 				.execute();
+ * 
+ *

+ * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * * @author Tobias Trelle * @author Oliver Gierke * @author Minsu Kim @@ -46,7 +61,7 @@ enum BulkMode { /** Perform bulk operations in parallel. Processing will continue on errors. */ UNORDERED - }; + } /** * Add a single insert to the bulk operation. @@ -67,11 +82,25 @@ enum BulkMode { /** * Add a single update to the bulk operation. For the update request, only the first matching document is updated. * - * @param query update criteria, must not be {@literal null}. + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link Update} operation to perform, must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateOne(Query query, Update update) { + return updateOne(query, (UpdateDefinition) update); + } + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. * @param update {@link Update} operation to perform, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. 
+ * @since 4.1 */ - BulkOperations updateOne(Query query, Update update); + BulkOperations updateOne(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated. @@ -79,7 +108,18 @@ enum BulkMode { * @param updates Update operations to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateOne(List> updates); + BulkOperations updateOne(List> updates); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateMulti(Query query, Update update) { + return updateMulti(query, (UpdateDefinition) update); + } /** * Add a single update to the bulk operation. For the update request, all matching documents are updated. @@ -87,8 +127,9 @@ enum BulkMode { * @param query Update criteria. * @param update Update operation to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 */ - BulkOperations updateMulti(Query query, Update update); + BulkOperations updateMulti(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, all matching documents are updated. @@ -96,7 +137,19 @@ enum BulkMode { * @param updates Update operations to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateMulti(List> updates); + BulkOperations updateMulti(List> updates); + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. 
+ * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations upsert(Query query, Update update) { + return upsert(query, (UpdateDefinition) update); + } /** * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, @@ -105,8 +158,9 @@ enum BulkMode { * @param query Update criteria. * @param update Update operation to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 */ - BulkOperations upsert(Query query, Update update); + BulkOperations upsert(Query query, UpdateDefinition update); /** * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty, @@ -136,9 +190,10 @@ enum BulkMode { /** * Add a single replace operation to the bulk operation. * - * @param query Update criteria. + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. * @param replacement the replacement document. Must not be {@literal null}. - * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. * @since 2.2 */ default BulkOperations replaceOne(Query query, Object replacement) { @@ -148,10 +203,11 @@ default BulkOperations replaceOne(Query query, Object replacement) { /** * Add a single replace operation to the bulk operation. * - * @param query Update criteria. + * @param query Replace criteria. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. - * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. * @since 2.2 */ BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java new file mode 100644 index 0000000000..1f5509cd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.context.ApplicationEvent; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.util.Assert; + +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOneModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; + +/** + * Support class for bulk operations. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +abstract class BulkOperationsSupport { + + private final String collectionName; + + BulkOperationsSupport(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + + this.collectionName = collectionName; + } + + /** + * Emit a {@link BeforeSaveEvent}. + * + * @param holder + */ + void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } + } + + /** + * Emit a {@link AfterSaveEvent}. + * + * @param holder + */ + void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } + } + + WriteModel mapWriteModel(Object source, WriteModel writeModel) { + + if (writeModel instanceof UpdateOneModel model) { + + Bson sort = model.getOptions().getSort(); + if (sort instanceof Document sortDocument) { + model.getOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new 
UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof UpdateManyModel model) { + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof DeleteOneModel model) { + return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof DeleteManyModel model) { + return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof ReplaceOneModel model) { + + Bson sort = model.getReplaceOptions().getSort(); + + if (sort instanceof Document sortDocument) { + model.getReplaceOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + return new ReplaceOneModel<>(getMappedQuery(model.getFilter()), model.getReplacement(), + model.getReplaceOptions()); + } + + return writeModel; + } + + private List mapUpdatePipeline(AggregationUpdate source) { + + Class type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class; + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, + updateMapper().getMappingContext(), queryMapper()); + + return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context); + } + + /** + * Emit a {@link ApplicationEvent} if event multicasting is enabled. + * + * @param event + */ + protected abstract void maybeEmitEvent(ApplicationEvent event); + + /** + * @return the {@link UpdateMapper} to use. + */ + protected abstract UpdateMapper updateMapper(); + + /** + * @return the {@link QueryMapper} to use. 
+ */ + protected abstract QueryMapper queryMapper(); + + /** + * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}. + */ + protected abstract Optional> entity(); + + protected Bson getMappedUpdate(Bson update) { + return updateMapper().getMappedObject(update, entity()); + } + + protected Bson getMappedQuery(Bson query) { + return queryMapper().getMappedObject(query, entity()); + } + + protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { + + BulkWriteOptions options = new BulkWriteOptions(); + + return switch (bulkMode) { + case ORDERED -> options.ordered(true); + case UNORDERED -> options.ordered(false); + }; + } + + /** + * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}. + * @param update The {@link Update} to apply + * @param upsert flag to indicate if document should be upserted. + * @param multi flag to indicate if update might affect multiple documents. + * @return new instance of {@link UpdateOptions}. + */ + protected UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert, + boolean multi) { + + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update.hasArrayFilters()) { + List list = new ArrayList<>(update.getArrayFilters().size()); + for (ArrayFilter arrayFilter : update.getArrayFilters()) { + list.add(arrayFilter.asDocument()); + } + options.arrayFilters(list); + } + + if (!multi && filterQuery.isSorted()) { + options.sort(filterQuery.getSortObject()); + } + + filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + return options; + } + + /** + * Value object chaining together an actual source with its {@link WriteModel} representation. 
+ * + * @author Christoph Strobl + */ + record SourceAwareWriteModelHolder(Object source, WriteModel model) { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java index 55a65a2baa..17b8835b7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,21 +36,29 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ public class ChangeStreamEvent { @SuppressWarnings("rawtypes") // - private static final AtomicReferenceFieldUpdater CONVERTED_UPDATER = AtomicReferenceFieldUpdater - .newUpdater(ChangeStreamEvent.class, Object.class, "converted"); + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument"); + + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange"); private final @Nullable ChangeStreamDocument raw; private final Class targetType; private final MongoConverter converter; - // accessed through CONVERTED_UPDATER. - private volatile @Nullable T converted; + // accessed through CONVERTED_FULL_DOCUMENT_UPDATER. 
+ private volatile @Nullable T convertedFullDocument; + + // accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER. + private volatile @Nullable T convertedFullDocumentBeforeChange; /** * @param raw can be {@literal null}. @@ -147,27 +155,43 @@ public String getCollectionName() { @Nullable public T getBody() { - if (raw == null) { + if (raw == null || raw.getFullDocument() == null) { return null; } - Document fullDocument = raw.getFullDocument(); + return getConvertedFullDocument(raw.getFullDocument()); + } - if (fullDocument == null) { - return targetType.cast(fullDocument); + /** + * Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed. + * + * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is + * {@literal null}. + * @since 4.0 + */ + @Nullable + public T getBodyBeforeChange() { + + if (raw == null || raw.getFullDocumentBeforeChange() == null) { + return null; } - return getConverted(fullDocument); + return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange()); } @SuppressWarnings("unchecked") - private T getConverted(Document fullDocument) { - return (T) doGetConverted(fullDocument); + private T getConvertedFullDocumentBeforeChange(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER); } - private Object doGetConverted(Document fullDocument) { + @SuppressWarnings("unchecked") + private T getConvertedFullDocument(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER); + } + + private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater updater) { - Object result = CONVERTED_UPDATER.get(this); + Object result = updater.get(this); if (result != null) { return result; @@ -176,30 +200,26 @@ private Object doGetConverted(Document fullDocument) { if (ClassUtils.isAssignable(Document.class, 
fullDocument.getClass())) { result = converter.read(targetType, fullDocument); - return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); } if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) { result = converter.getConversionService().convert(fullDocument, targetType); - return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); } throw new IllegalArgumentException( String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType)); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}'; } @Override - public boolean equals(Object o) { + public boolean equals(@Nullable Object o) { if (this == o) return true; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java index a4f6f7e226..aaee3b76af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * Options applicable to MongoDB Change Streams. Intended @@ -40,6 +41,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ public class ChangeStreamOptions { @@ -47,6 +49,7 @@ public class ChangeStreamOptions { private @Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; private @Nullable Object resumeTimestamp; private Resume resume = Resume.UNDEFINED; @@ -74,6 +77,14 @@ public Optional getFullDocumentLookup() { return Optional.ofNullable(fullDocumentLookup); } + /** + * @return {@link Optional#empty()} if not set. + * @since 4.0 + */ + public Optional getFullDocumentBeforeChangeLookup() { + return Optional.ofNullable(fullDocumentBeforeChangeLookup); + } + /** * @return {@link Optional#empty()} if not set. */ @@ -121,7 +132,7 @@ public static ChangeStreamOptions empty() { /** * Obtain a shiny new {@link ChangeStreamOptionsBuilder} and start defining options in this fancy fluent way. Just - * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when your're done. + * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when done. * * @return new instance of {@link ChangeStreamOptionsBuilder}. 
*/ @@ -139,21 +150,21 @@ private static Object doGetTimestamp(Object timestamp, Class targetType) return timestamp; } - if (timestamp instanceof Instant) { - return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0); + if (timestamp instanceof Instant instant) { + return new BsonTimestamp((int) instant.getEpochSecond(), 0); } - if (timestamp instanceof BsonTimestamp) { - return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime()); + if (timestamp instanceof BsonTimestamp bsonTimestamp) { + return Instant.ofEpochSecond(bsonTimestamp.getTime()); } throw new IllegalArgumentException( - "o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was " + "o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was " + ObjectUtils.nullSafeClassName(timestamp)); } @Override - public boolean equals(Object o) { + public boolean equals(@Nullable Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) @@ -170,6 +181,9 @@ public boolean equals(Object o) { if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) { return false; } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) { + return false; + } if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) { return false; } @@ -184,6 +198,7 @@ public int hashCode() { int result = ObjectUtils.nullSafeHashCode(filter); result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken); result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup); + result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup); result = 31 * result + ObjectUtils.nullSafeHashCode(collation); result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp); result = 31 * result + ObjectUtils.nullSafeHashCode(resume); @@ -220,6 +235,7 @@ public static class ChangeStreamOptionsBuilder { private 
@Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; private @Nullable Object resumeTimestamp; private Resume resume = Resume.UNDEFINED; @@ -234,7 +250,7 @@ private ChangeStreamOptionsBuilder() {} */ public ChangeStreamOptionsBuilder collation(Collation collation) { - Assert.notNull(collation, "Collation must not be null nor empty!"); + Assert.notNull(collation, "Collation must not be null nor empty"); this.collation = collation; return this; @@ -242,13 +258,13 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { /** * Set the filter to apply. - *

+ *
* Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the * structure of Change Events. - *

+ *
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * @@ -258,7 +274,7 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { */ public ChangeStreamOptionsBuilder filter(Aggregation filter) { - Assert.notNull(filter, "Filter must not be null!"); + Assert.notNull(filter, "Filter must not be null"); this.filter = filter; return this; @@ -287,7 +303,7 @@ public ChangeStreamOptionsBuilder filter(Document... filter) { */ public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) { - Assert.notNull(resumeToken, "ResumeToken must not be null!"); + Assert.notNull(resumeToken, "ResumeToken must not be null"); this.resumeToken = resumeToken; @@ -316,12 +332,38 @@ public ChangeStreamOptionsBuilder returnFullDocumentOnUpdate() { */ public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) { - Assert.notNull(lookup, "Lookup must not be null!"); + Assert.notNull(lookup, "Lookup must not be null"); this.fullDocumentLookup = lookup; return this; } + /** + * Set the {@link FullDocumentBeforeChange} lookup to use. + * + * @param lookup must not be {@literal null}. + * @return this. + * @since 4.0 + */ + public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "Lookup must not be null"); + + this.fullDocumentBeforeChangeLookup = lookup; + return this; + } + + /** + * Return the full document before being changed if it is available. + * + * @return this. + * @since 4.0 + * @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() { + return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE); + } + /** * Set the cluster time to resume from. 
* @@ -330,7 +372,7 @@ public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) { */ public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) { - Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!"); + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); this.resumeTimestamp = resumeTimestamp; return this; @@ -345,7 +387,7 @@ public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) { */ public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) { - Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!"); + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); this.resumeTimestamp = resumeTimestamp; return this; @@ -391,6 +433,7 @@ public ChangeStreamOptions build() { options.filter = this.filter; options.resumeToken = this.resumeToken; options.fullDocumentLookup = this.fullDocumentLookup; + options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup; options.collation = this.collation; options.resumeTimestamp = this.resumeTimestamp; options.resume = this.resume; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java index 5c79cffd6c..c142aca173 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index ca61d18d96..5df30e0b92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,14 +15,37 @@ */ package org.springframework.data.mongodb.core; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.StreamSupport; +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import 
org.springframework.data.util.Optionals; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Contract; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.ValidationAction; import com.mongodb.client.model.ValidationLevel; @@ -34,6 +57,8 @@ * @author Christoph Strobl * @author Mark Paluch * @author Andreas Zink + * @author Ben Foster + * @author Ross Lawley */ public class CollectionOptions { @@ -42,29 +67,23 @@ public class CollectionOptions { private @Nullable Boolean capped; private @Nullable Collation collation; private ValidationOptions validationOptions; - - /** - * Constructs a new CollectionOptions instance. - * - * @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}. - * @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}. - * @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order), - * false otherwise. Can be {@literal null}. - * @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point. 
- */ - @Deprecated - public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) { - this(size, maxDocuments, capped, null, ValidationOptions.none()); - } + private @Nullable TimeSeriesOptions timeSeriesOptions; + private @Nullable CollectionChangeStreamOptions changeStreamOptions; + private @Nullable EncryptedFieldsOptions encryptedFieldsOptions; private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, - @Nullable Collation collation, ValidationOptions validationOptions) { + @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions, + @Nullable CollectionChangeStreamOptions changeStreamOptions, + @Nullable EncryptedFieldsOptions encryptedFieldsOptions) { this.maxDocuments = maxDocuments; this.size = size; this.capped = capped; this.collation = collation; this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; + this.changeStreamOptions = changeStreamOptions; + this.encryptedFieldsOptions = encryptedFieldsOptions; } /** @@ -76,9 +95,9 @@ private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nul */ public static CollectionOptions just(Collation collation) { - Assert.notNull(collation, "Collation must not be null!"); + Assert.notNull(collation, "Collation must not be null"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none()); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null, null); } /** @@ -88,18 +107,97 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none()); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null, null); + } + + /** + * Quick way to set up {@link CollectionOptions} for a Time Series 
collection. For more advanced settings use + * {@link #timeSeries(String, Function)}. + * + * @param timeField The name of the property which contains the date in each time series document. Must not be + * {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @see #timeSeries(TimeSeriesOptions) + * @since 3.3 + */ + public static CollectionOptions timeSeries(String timeField) { + return timeSeries(timeField, it -> it); + } + + /** + * Set up {@link CollectionOptions} for a Time Series collection. + * + * @param timeField the name of the field that contains the date in each time series document. + * @param options a function to apply additional settings to {@link TimeSeriesOptions}. + * @return new instance of {@link CollectionOptions}. + * @since 4.4 + */ + public static CollectionOptions timeSeries(String timeField, Function options) { + return empty().timeSeries(options.apply(TimeSeriesOptions.timeSeries(timeField))); + } + + /** + * Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events. + * + * @return new instance of {@link CollectionOptions}. + * @see #changeStream(CollectionChangeStreamOptions) + * @see CollectionChangeStreamOptions#preAndPostImages(boolean) + * @since 4.0 + */ + public static CollectionOptions emitChangedRevisions() { + return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true)); + } + + /** + * Create new {@link CollectionOptions} with the given {@code encryptedFields}. + * + * @param encryptedFieldsOptions can be null + * @return new instance of {@link CollectionOptions}. 
+ * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(@Nullable EncryptedFieldsOptions encryptedFieldsOptions) { + return new CollectionOptions(null, null, null, null, ValidationOptions.NONE, null, null, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} reading encryption options from the given {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(MongoJsonSchema schema) { + return encryptedCollection(EncryptedFieldsOptions.fromSchema(schema)); + } + + /** + * Create new {@link CollectionOptions} building encryption options in a fluent style. + * + * @param optionsFunction must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection( + Function optionsFunction) { + return encryptedCollection(optionsFunction.apply(new EncryptedFieldsOptions())); } /** * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
- * NOTE Using capped collections requires defining {@link #size(long)}. + * NOTE: Using capped collections requires defining {@link #size(long)}. * * @return new {@link CollectionOptions}. * @since 2.0 */ public CollectionOptions capped() { - return new CollectionOptions(size, maxDocuments, true, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -110,7 +208,8 @@ public CollectionOptions capped() { * @since 2.0 */ public CollectionOptions maxDocuments(long maxDocuments) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -121,7 +220,8 @@ public CollectionOptions maxDocuments(long maxDocuments) { * @since 2.0 */ public CollectionOptions size(long size) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -132,18 +232,19 @@ public CollectionOptions size(long size) { * @since 2.0 */ public CollectionOptions collation(@Nullable Collation collation) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given * {@link MongoJsonSchema}. * - * @param schema can be {@literal null}. + * @param schema must not be {@literal null}. * @return new {@link CollectionOptions}. 
* @since 2.1 */ - public CollectionOptions schema(@Nullable MongoJsonSchema schema) { + public CollectionOptions schema(MongoJsonSchema schema) { return validator(Validator.schema(schema)); } @@ -224,7 +325,7 @@ public CollectionOptions failOnValidationError() { */ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) { - Assert.notNull(validationLevel, "ValidationLevel must not be null!"); + Assert.notNull(validationLevel, "ValidationLevel must not be null"); return validation(validationOptions.validationLevel(validationLevel)); } @@ -238,7 +339,7 @@ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) */ public CollectionOptions schemaValidationAction(ValidationAction validationAction) { - Assert.notNull(validationAction, "ValidationAction must not be null!"); + Assert.notNull(validationAction, "ValidationAction must not be null"); return validation(validationOptions.validationAction(validationAction)); } @@ -251,8 +352,52 @@ public CollectionOptions schemaValidationAction(ValidationAction validationActio */ public CollectionOptions validation(ValidationOptions validationOptions) { - Assert.notNull(validationOptions, "ValidationOptions must not be null!"); - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + Assert.notNull(validationOptions, "ValidationOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param timeSeriesOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. 
+ * @since 3.3 + */ + public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { + + Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param changeStreamOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 3.3 + */ + public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) { + + Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Set the {@link EncryptedFieldsOptions} for collections using queryable encryption. + * + * @param encryptedFieldsOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + */ + @Contract("_ -> new") + @CheckReturnValue + public CollectionOptions encrypted(EncryptedFieldsOptions encryptedFieldsOptions) { + + Assert.notNull(encryptedFieldsOptions, "EncryptedCollectionOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -303,6 +448,94 @@ public Optional getValidationOptions() { return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions); } + /** + * Get the {@link TimeSeriesOptions} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 3.3 + */ + public Optional getTimeSeriesOptions() { + return Optional.ofNullable(timeSeriesOptions); + } + + /** + * Get the {@link CollectionChangeStreamOptions} if available. 
+ * + * @return {@link Optional#empty()} if not specified. + * @since 4.0 + */ + public Optional getChangeStreamOptions() { + return Optional.ofNullable(changeStreamOptions); + } + + /** + * Get the {@code encryptedFields} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 4.5 + */ + public Optional getEncryptedFieldsOptions() { + return Optional.ofNullable(encryptedFieldsOptions); + } + + @Override + public String toString() { + return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped + + ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions=" + + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", encryptedCollectionOptions=" + + encryptedFieldsOptions + ", disableValidation=" + disableValidation() + ", strictValidation=" + + strictValidation() + ", moderateValidation=" + moderateValidation() + ", warnOnValidationError=" + + warnOnValidationError() + ", failOnValidationError=" + failOnValidationError() + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionOptions that = (CollectionOptions) o; + + if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(size, that.size)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(capped, that.capped)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions)) { + return false; + } + return ObjectUtils.nullSafeEquals(encryptedFieldsOptions, 
that.encryptedFieldsOptions); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(maxDocuments); + result = 31 * result + ObjectUtils.nullSafeHashCode(size); + result = 31 * result + ObjectUtils.nullSafeHashCode(capped); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(encryptedFieldsOptions); + return result; + } + /** * Encapsulation of ValidationOptions options. * @@ -318,7 +551,8 @@ public static class ValidationOptions { private final @Nullable ValidationLevel validationLevel; private final @Nullable ValidationAction validationAction; - public ValidationOptions(Validator validator, ValidationLevel validationLevel, ValidationAction validationAction) { + public ValidationOptions(@Nullable Validator validator, @Nullable ValidationLevel validationLevel, + @Nullable ValidationAction validationAction) { this.validator = validator; this.validationLevel = validationLevel; @@ -385,7 +619,7 @@ public Optional getValidationLevel() { /** * Get the {@code validationAction} to perform. * - * @return @return {@link Optional#empty()} if not set. + * @return {@link Optional#empty()} if not set. 
*/ public Optional getValidationAction() { return Optional.ofNullable(validationAction); @@ -397,5 +631,418 @@ public Optional getValidationAction() { boolean isEmpty() { return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); } + + @Override + public String toString() { + + return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel + + ", validationAction=" + validationAction + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ValidationOptions that = (ValidationOptions) o; + + if (!ObjectUtils.nullSafeEquals(validator, that.validator)) { + return false; + } + if (validationLevel != that.validationLevel) + return false; + return validationAction == that.validationAction; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(validator); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction); + return result; + } + } + + /** + * Encapsulation of Encryption options for collections. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class EncryptedFieldsOptions { + + private static final EncryptedFieldsOptions NONE = new EncryptedFieldsOptions(); + + private final @Nullable MongoJsonSchema schema; + private final List queryableProperties; + + EncryptedFieldsOptions() { + this(null, List.of()); + } + + private EncryptedFieldsOptions(@Nullable MongoJsonSchema schema, + List queryableProperties) { + + this.schema = schema; + this.queryableProperties = queryableProperties; + } + + /** + * @return {@link EncryptedFieldsOptions#NONE} + */ + public static EncryptedFieldsOptions none() { + return NONE; + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. 
+ */ + public static EncryptedFieldsOptions fromSchema(MongoJsonSchema schema) { + return new EncryptedFieldsOptions(schema, List.of()); + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromProperties(List properties) { + return new EncryptedFieldsOptions(null, List.copyOf(properties)); + } + + /** + * Add a new {@link QueryableJsonSchemaProperty queryable property} for the given source property. + *

+ * Please note that, a given {@link JsonSchemaProperty} may override options from a given {@link MongoJsonSchema} if + * set. + * + * @param property the queryable source - typically + * {@link org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty + * encrypted}. + * @param characteristics the query options to set. + * @return new instance of {@link EncryptedFieldsOptions}. + */ + @Contract("_, _ -> new") + @CheckReturnValue + public EncryptedFieldsOptions queryable(JsonSchemaProperty property, QueryCharacteristic... characteristics) { + + List targetPropertyList = new ArrayList<>(queryableProperties.size() + 1); + targetPropertyList.addAll(queryableProperties); + targetPropertyList.add(JsonSchemaProperty.queryable(property, List.of(characteristics))); + + return new EncryptedFieldsOptions(schema, targetPropertyList); + } + + public Document toDocument() { + return new Document("fields", selectPaths()); + } + + private List selectPaths() { + + Map fields = new LinkedHashMap<>(); + for (Document field : fromSchema()) { + fields.put(field.get("path", String.class), field); + } + for (Document field : fromProperties()) { + fields.put(field.get("path", String.class), field); + } + return List.copyOf(fields.values()); + } + + private List fromProperties() { + + if (queryableProperties.isEmpty()) { + return List.of(); + } + + List converted = new ArrayList<>(queryableProperties.size()); + for (QueryableJsonSchemaProperty property : queryableProperties) { + + Document field = new Document("path", property.getIdentifier()); + + if (!property.getTypes().isEmpty()) { + field.append("bsonType", property.getTypes().iterator().next().toBsonType().value()); + } + + if (property + .getTargetProperty() instanceof IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty encrypted) { + if (encrypted.getKeyId() != null) { + if (encrypted.getKeyId() instanceof String stringKey) { + field.append("keyId", + new 
BsonBinary(BsonBinarySubType.UUID_STANDARD, stringKey.getBytes(StandardCharsets.UTF_8))); + } else { + field.append("keyId", encrypted.getKeyId()); + } + } + } + + field.append("queries", StreamSupport.stream(property.getCharacteristics().spliterator(), false) + .map(QueryCharacteristic::toDocument).toList()); + + if (!field.containsKey("keyId")) { + field.append("keyId", BsonNull.VALUE); + } + + converted.add(field); + } + return converted; + } + + private List fromSchema() { + + if (schema == null) { + return List.of(); + } + + Document root = schema.schemaDocument(); + Map paths = new LinkedHashMap<>(); + collectPaths(root, null, paths); + + List fields = new ArrayList<>(); + if (!paths.isEmpty()) { + + for (Entry entry : paths.entrySet()) { + Document field = new Document("path", entry.getKey()); + field.append("keyId", entry.getValue().getOrDefault("keyId", BsonNull.VALUE)); + if (entry.getValue().containsKey("bsonType")) { + field.append("bsonType", entry.getValue().get("bsonType")); + } + field.put("queries", entry.getValue().get("queries")); + fields.add(field); + } + } + + return fields; + } + } + + private static void collectPaths(Document document, @Nullable String currentPath, Map paths) { + + if (document.containsKey("type") && document.get("type").equals("object")) { + Object o = document.get("properties"); + if (o == null) { + return; + } + + if (o instanceof Document properties) { + for (Entry entry : properties.entrySet()) { + if (entry.getValue() instanceof Document nested) { + + String path = currentPath == null ? entry.getKey() : (currentPath + "." 
+ entry.getKey()); + if (nested.containsKey("encrypt")) { + Document target = new Document(nested.get("encrypt", Document.class)); + if (nested.containsKey("queries")) { + List queries = nested.get("queries", List.class); + if (!queries.isEmpty() && queries.iterator().next() instanceof Document qd) { + target.putAll(qd); + } + } + paths.put(path, target); + } else { + collectPaths(nested, path, paths); + } + } + } + } + } + } + + /** + * Encapsulation of options applied to define collections change stream behaviour. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class CollectionChangeStreamOptions { + + private final boolean preAndPostImages; + + private CollectionChangeStreamOptions(boolean emitChangedRevisions) { + this.preAndPostImages = emitChangedRevisions; + } + + /** + * Output the version of a document before and after changes (the document pre- and post-images). + * + * @return new instance of {@link CollectionChangeStreamOptions}. + */ + public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) { + return new CollectionChangeStreamOptions(emitChangedRevisions); + } + + public boolean getPreAndPostImages() { + return preAndPostImages; + } + + @Override + public String toString() { + return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o; + + return preAndPostImages == that.preAndPostImages; + } + + @Override + public int hashCode() { + return (preAndPostImages ? 1 : 0); + } + } + + /** + * Options applicable to Time Series collections.
+ * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/core/timeseries-collections + * @since 3.3 + */ + public static class TimeSeriesOptions { + + private final String timeField; + + private @Nullable final String metaField; + + private final GranularityDefinition granularity; + + private final Duration expireAfter; + + private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity, + Duration expireAfter) { + Assert.hasText(timeField, "Time field must not be empty or null"); + + this.timeField = timeField; + this.metaField = metaField; + this.granularity = granularity; + this.expireAfter = expireAfter; + } + + /** + * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one, + * that contains the date in each time series document.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param timeField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public static TimeSeriesOptions timeSeries(String timeField) { + return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT, Duration.ofSeconds(-1)); + } + + /** + * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id} + * nor {@link TimeSeriesOptions#timeSeries(String) timeField} nor point to an {@literal array} or + * {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity + */ + public TimeSeriesOptions granularity(GranularityDefinition granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Set the {@link Duration} for automatic removal of documents older than a specified value. + * + * @param ttl must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + * @see com.mongodb.client.model.CreateCollectionOptions#expireAfter(long, java.util.concurrent.TimeUnit) + * @since 4.4 + */ + public TimeSeriesOptions expireAfter(Duration ttl) { + return new TimeSeriesOptions(timeField, metaField, granularity, ttl); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. + */ + public GranularityDefinition getGranularity() { + return granularity; + } + + /** + * Get the {@link Duration} for automatic removal of documents. + * + * @return a {@link Duration#isNegative() negative} value if not specified. 
+ * @since 4.4 + */ + public Duration getExpireAfter() { + return expireAfter; + } + + @Override + public String toString() { + + return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\'' + + ", granularity=" + granularity + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TimeSeriesOptions that = (TimeSeriesOptions) o; + + if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) { + return false; + } + return ObjectUtils.nullSafeEquals(granularity, that.granularity); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(timeField); + result = 31 * result + ObjectUtils.nullSafeHashCode(metaField); + result = 31 * result + ObjectUtils.nullSafeHashCode(granularity); + return result; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java new file mode 100644 index 0000000000..f3769355c7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.util.Assert; + +import com.mongodb.client.MongoCollection; + +/** + * Interface for functional preparation of a {@link MongoCollection}. + * + * @author Mark Paluch + * @since 4.1 + */ +public interface CollectionPreparer { + + /** + * Returns a preparer that always returns its input collection. + * + * @return a preparer that always returns its input collection. + */ + static CollectionPreparer identity() { + return it -> it; + } + + /** + * Prepare the {@code collection}. + * + * @param collection the collection to prepare. + * @return the prepared collection. + */ + T prepare(T collection); + + /** + * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies + * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to + * the caller of the composed function. + * + * @param after the collection preparer to apply after this function is applied. + * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after} + * preparer. + */ + default CollectionPreparer andThen(CollectionPreparer after) { + Assert.notNull(after, "After CollectionPreparer must not be null"); + return c -> after.prepare(prepare(c)); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java new file mode 100644 index 0000000000..644a3a54d1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java @@ -0,0 +1,182 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; + +import org.bson.Document; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.client.MongoCollection; + +/** + * Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon + * {@link CollectionPreparer preparing a collection}. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware { + + private final List sources; + + private CollectionPreparerSupport(List sources) { + this.sources = sources; + } + + T doPrepare(T collection, Function concernAccessor, BiFunction concernFunction, + Function preferenceAccessor, BiFunction preferenceFunction) { + + T collectionToUse = collection; + + for (Object source : sources) { + if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) { + + ReadConcern concern = rca.getReadConcern(); + if (concernAccessor.apply(collectionToUse) != concern) { + collectionToUse = concernFunction.apply(collectionToUse, concern); + } + break; + } + } + + for (Object source : sources) { + if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + + ReadPreference preference = rpa.getReadPreference(); + if (preferenceAccessor.apply(collectionToUse) != preference) { + collectionToUse = preferenceFunction.apply(collectionToUse, preference); + } + break; + } + } + + return collectionToUse; + } + + @Override + public boolean hasReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return true; + } + } + + return false; + } + + @Override + public ReadConcern getReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return rca.getReadConcern(); + } + } + + return null; + } + + @Override + public boolean hasReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return true; + } + } + + return false; + } + + @Override + public ReadPreference getReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return rpa.getReadPreference(); + } + } + + return null; + } + + static class 
CollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private CollectionPreparerDelegate(List sources) { + super(sources); + } + + public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static CollectionPreparerDelegate of(Object... mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (CollectionPreparerDelegate) mixedAwares[0]; + } + + return new CollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public MongoCollection prepare(MongoCollection collection) { + return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern, + MongoCollection::getReadPreference, MongoCollection::withReadPreference); + } + + } + + static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private ReactiveCollectionPreparerDelegate(List sources) { + super(sources); + } + + public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static ReactiveCollectionPreparerDelegate of(Object... 
mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof ReactiveCollectionPreparerDelegate) { + return (ReactiveCollectionPreparerDelegate) mixedAwares[0]; + } + + return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public com.mongodb.reactivestreams.client.MongoCollection prepare( + com.mongodb.reactivestreams.client.MongoCollection collection) { + return doPrepare(collection, // + com.mongodb.reactivestreams.client.MongoCollection::getReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::withReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::getReadPreference, + com.mongodb.reactivestreams.client.MongoCollection::withReadPreference); + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java index ec7104c9de..4fa6b3e97d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -23,8 +23,8 @@ import java.util.Map; import org.bson.Document; - import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.MetricConversion; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; @@ -38,7 +38,7 @@ */ class CountQuery { - private Document source; + private final Document source; private CountQuery(Document source) { this.source = source; @@ -64,18 +64,15 @@ public Document toQueryDocument() { for (Map.Entry entry : source.entrySet()) { - if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) { + if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) { - Document theValue = (Document) entry.getValue(); - target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and"))); + target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and"))); continue; } - if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) { - - Collection source = (Collection) entry.getValue(); + if (entry.getValue() instanceof Collection collection && requiresRewrite(entry.getValue())) { - target.put(entry.getKey(), rewriteCollection(source)); + target.put(entry.getKey(), rewriteCollection(collection)); continue; } @@ -96,12 +93,12 @@ public Document toQueryDocument() { */ private boolean requiresRewrite(Object valueToInspect) { - if (valueToInspect instanceof Document) { - return requiresRewrite((Document) valueToInspect); + if (valueToInspect instanceof Document document) { + return requiresRewrite(document); } - if (valueToInspect instanceof Collection) { - return requiresRewrite((Collection) valueToInspect); + if (valueToInspect instanceof Collection collection) { + return requiresRewrite(collection); } return false; @@ -110,7 +107,7 @@ private boolean requiresRewrite(Object valueToInspect) { private boolean requiresRewrite(Collection collection) { for (Object o : collection) { - if (o instanceof Document 
&& requiresRewrite((Document) o)) { + if (o instanceof Document document && requiresRewrite(document)) { return true; } } @@ -139,8 +136,8 @@ private Collection rewriteCollection(Collection source) { Collection rewrittenCollection = new ArrayList<>(source.size()); for (Object item : source) { - if (item instanceof Document && requiresRewrite(item)) { - rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument()); + if (item instanceof Document document && requiresRewrite(item)) { + rewrittenCollection.add(CountQuery.of(document).toQueryDocument()); } else { rewrittenCollection.add(item); } @@ -157,12 +154,14 @@ private Collection rewriteCollection(Collection source) { * @param $and potentially existing {@code $and} condition. * @return the rewritten query {@link Document}. */ + @SuppressWarnings("unchecked") private static Document createGeoWithin(String key, Document source, @Nullable Object $and) { boolean spheric = source.containsKey("$nearSphere"); Object $near = spheric ? source.get("$nearSphere") : source.get("$near"); - Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE; + Number maxDistance = getMaxDistance(source, $near, spheric); + List $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance); Document $geoWithinMax = new Document("$geoWithin", new Document(spheric ? "$centerSphere" : "$center", $centerMax)); @@ -176,23 +175,51 @@ private static Document createGeoWithin(String key, Document source, @Nullable O Document $geoWithinMin = new Document("$geoWithin", new Document(spheric ? 
"$centerSphere" : "$center", $centerMin)); - List criteria = new ArrayList<>(); + List criteria; if ($and != null) { if ($and instanceof Collection) { - criteria.addAll((Collection) $and); + Collection andElements = (Collection) $and; + criteria = new ArrayList<>(andElements.size() + 2); + criteria.addAll(andElements); } else { throw new IllegalArgumentException( - "Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: " + "Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: " + $and); } + } else { + criteria = new ArrayList<>(2); } criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin)))); criteria.add(new Document(key, $geoWithinMax)); + return new Document("$and", criteria); } + private static Number getMaxDistance(Document source, Object $near, boolean spheric) { + + Number maxDistance = Double.MAX_VALUE; + + if (source.containsKey("$maxDistance")) { // legacy coordinate pair + return (Number) source.get("$maxDistance"); + } + + if ($near instanceof Document nearDoc) { + + if (nearDoc.containsKey("$maxDistance")) { + + maxDistance = (Number) nearDoc.get("$maxDistance"); + // geojson is in Meters but we need radians x/(6378.1*1000) + if (spheric && nearDoc.containsKey("$geometry")) { + maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue()); + } + } + } + + return maxDistance; + } + private static boolean containsNear(Document source) { return source.containsKey("$near") || source.containsKey("$nearSphere"); } @@ -212,14 +239,20 @@ private static Object toCenterCoordinates(Object value) { return value; } - if (value instanceof Point) { - return Arrays.asList(((Point) value).getX(), ((Point) value).getY()); + if (value instanceof Point point) { + return Arrays.asList(point.getX(), point.getY()); } - if (value instanceof Document && ((Document) value).containsKey("x")) { + if (value instanceof Document document) { + + 
if (document.containsKey("x")) { + return Arrays.asList(document.get("x"), document.get("y")); + } - Document point = (Document) value; - return Arrays.asList(point.get("x"), point.get("y")); + if (document.containsKey("$geometry")) { + Document geoJsonPoint = document.get("$geometry", Document.class); + return geoJsonPoint.get("coordinates"); + } } return value; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java index dd68932c3f..9b7408b0cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2021 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -49,7 +49,7 @@ public interface CursorPreparer extends ReadPreferenceAware { FindIterable prepare(FindIterable iterable); /** - * Apply query specific settings to {@link MongoCollection} and initate a find operation returning a + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a * {@link FindIterable} via the given {@link Function find} function. * * @param collection must not be {@literal null}. 
@@ -61,8 +61,8 @@ public interface CursorPreparer extends ReadPreferenceAware { default FindIterable initiateFind(MongoCollection collection, Function, FindIterable> find) { - Assert.notNull(collection, "Collection must not be null!"); - Assert.notNull(find, "Find function must not be null!"); + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); if (hasReadPreference()) { collection = collection.withReadPreference(getReadPreference()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java index 8e9aa3522b..9d588ad16d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java index f4d72f648b..52343522a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,42 +16,47 @@ package org.springframework.data.mongodb.core; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.bson.Document; -import org.bson.conversions.Bson; +import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.callback.EntityCallback; import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.UpdateDefinition; -import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; import org.springframework.data.util.Pair; import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import org.springframework.util.ObjectUtils; import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; import 
com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.MongoCollection; -import com.mongodb.client.model.*; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; /** * Default implementation for {@link BulkOperations}. @@ -67,7 +72,7 @@ * @author Jacob Botuck * @since 1.9 */ -class DefaultBulkOperations implements BulkOperations { +class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations { private final MongoOperations mongoOperations; private final String collectionName; @@ -75,7 +80,6 @@ class DefaultBulkOperations implements BulkOperations { private final List models = new ArrayList<>(); private @Nullable WriteConcern defaultWriteConcern; - private BulkWriteOptions bulkOptions; /** @@ -90,14 +94,15 @@ class DefaultBulkOperations implements BulkOperations { DefaultBulkOperations(MongoOperations mongoOperations, String collectionName, BulkOperationContext bulkOperationContext) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.hasText(collectionName, "CollectionName must not be null nor empty!"); - Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null!"); + super(collectionName); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; this.bulkOperationContext = bulkOperationContext; - this.bulkOptions = 
getBulkWriteOptions(bulkOperationContext.getBulkMode()); + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } /** @@ -109,14 +114,10 @@ void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { this.defaultWriteConcern = defaultWriteConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object) - */ @Override public BulkOperations insert(Object document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName)); Object source = maybeInvokeBeforeConvertCallback(document); @@ -125,93 +126,65 @@ public BulkOperations insert(Object document) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List) - */ @Override public BulkOperations insert(List documents) { - Assert.notNull(documents, "Documents must not be null!"); + Assert.notNull(documents, "Documents must not be null"); documents.forEach(this::insert); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateOne(Query query, Update update) { + public BulkOperations updateOne(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateOne(Collections.singletonList(Pair.of(query, update))); + return update(query, update, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List) - */ @Override - public BulkOperations 
updateOne(List> updates) { + public BulkOperations updateOne(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, false); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateMulti(Query query, Update update) { + public BulkOperations updateMulti(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateMulti(Collections.singletonList(Pair.of(query, update))); + update(query, update, false, true); + + return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List) - */ @Override - public BulkOperations updateMulti(List> updates) { + public BulkOperations updateMulti(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, true); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - public BulkOperations upsert(Query query, Update update) { + public BulkOperations upsert(Query query, UpdateDefinition update) { return update(query, update, true, true); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List) - */ @Override public BulkOperations upsert(List> updates) { @@ -222,14 +195,10 @@ public BulkOperations upsert(List> updates) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query) - */ @Override public BulkOperations remove(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); DeleteOptions deleteOptions = new DeleteOptions(); query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); @@ -239,14 +208,10 @@ public BulkOperations remove(Query query) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List) - */ @Override public BulkOperations remove(List removes) { - Assert.notNull(removes, "Removals must not be null!"); + Assert.notNull(removes, "Removals must not be null"); for (Query query : removes) { remove(query); @@ -255,33 +220,27 @@ public BulkOperations remove(List removes) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions) - */ @Override public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(replacement, "Replacement must not be null!"); - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); ReplaceOptions replaceOptions = new ReplaceOptions(); replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + 
replaceOptions.sort(query.getSortObject()); + } query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName)); Object source = maybeInvokeBeforeConvertCallback(replacement); - addModel(source, - new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions)); + addModel(source, new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(source), replaceOptions)); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#executeBulk() - */ @Override public com.mongodb.bulk.BulkWriteResult execute() { @@ -289,14 +248,14 @@ public com.mongodb.bulk.BulkWriteResult execute() { com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo); - Assert.state(result != null, "Result must not be null."); + Assert.state(result != null, "Result must not be null"); models.forEach(this::maybeEmitAfterSaveEvent); models.forEach(this::maybeInvokeAfterSaveCallback); return result; } finally { - this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode()); + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } } @@ -315,9 +274,8 @@ private BulkWriteResult bulkWriteTo(MongoCollection collection) { bulkOptions); } catch (RuntimeException ex) { - if (ex instanceof MongoBulkWriteException) { + if (ex instanceof MongoBulkWriteException mongoBulkWriteException) { - MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex; if (mongoBulkWriteException.getWriteConcernError() != null) { throw new DataIntegrityViolationException(ex.getMessage(), ex); } @@ -332,17 +290,17 @@ private WriteModel extractAndMapWriteModel(SourceAwareWriteModelHolder maybeEmitBeforeSaveEvent(it); - if (it.getModel() instanceof InsertOneModel) { + if (it.model() instanceof InsertOneModel model) { - Document target = ((InsertOneModel) 
it.getModel()).getDocument(); - maybeInvokeBeforeSaveCallback(it.getSource(), target); - } else if (it.getModel() instanceof ReplaceOneModel) { + Document target = model.getDocument(); + maybeInvokeBeforeSaveCallback(it.source(), target); + } else if (it.model() instanceof ReplaceOneModel model) { - Document target = ((ReplaceOneModel) it.getModel()).getReplacement(); - maybeInvokeBeforeSaveCallback(it.getSource(), target); + Document target = model.getReplacement(); + maybeInvokeBeforeSaveCallback(it.source(), target); } - return mapWriteModel(it.getModel()); + return mapWriteModel(it.source(), it.model()); } /** @@ -354,12 +312,12 @@ private WriteModel extractAndMapWriteModel(SourceAwareWriteModelHolder * @param multi whether to issue a multi-update. * @return the {@link BulkOperations} with the update registered. */ - private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) { + private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - UpdateOptions options = computeUpdateOptions(query, update, upsert); + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); if (multi) { addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); @@ -370,53 +328,30 @@ private BulkOperations update(Query query, Update update, boolean upsert, boolea return this; } - private WriteModel mapWriteModel(WriteModel writeModel) { - - if (writeModel instanceof UpdateOneModel) { - - UpdateOneModel model = (UpdateOneModel) writeModel; - - return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); - } - - if (writeModel instanceof UpdateManyModel) { - - UpdateManyModel model = 
(UpdateManyModel) writeModel; - - return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); - } - - if (writeModel instanceof DeleteOneModel) { - - DeleteOneModel model = (DeleteOneModel) writeModel; - - return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } - - if (writeModel instanceof DeleteManyModel) { - - DeleteManyModel model = (DeleteManyModel) writeModel; - - return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } - return writeModel; + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); } - private Bson getMappedUpdate(Bson update) { - return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity()); + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); } - private Bson getMappedQuery(Bson query) { - return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity()); + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); } private Document getMappedObject(Object source) { - if (source instanceof Document) { - return (Document) source; + if (source instanceof Document document) { + return document; } Document sink = new Document(); @@ -429,268 +364,83 @@ private void addModel(Object source, WriteModel model) { models.add(new SourceAwareWriteModelHolder(source, model)); } - private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) { - - if (holder.getModel() instanceof InsertOneModel) { - - Document target = ((InsertOneModel) holder.getModel()).getDocument(); - maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName)); - } else if (holder.getModel() instanceof ReplaceOneModel) { - - 
Document target = ((ReplaceOneModel) holder.getModel()).getReplacement(); - maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName)); - } - } - - private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) { - - if (holder.getModel() instanceof InsertOneModel) { - - Document target = ((InsertOneModel) holder.getModel()).getDocument(); - maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName)); - } else if (holder.getModel() instanceof ReplaceOneModel) { - - Document target = ((ReplaceOneModel) holder.getModel()).getReplacement(); - maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName)); - } - } - private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { - if (holder.getModel() instanceof InsertOneModel) { + if (holder.model() instanceof InsertOneModel model) { - Document target = ((InsertOneModel) holder.getModel()).getDocument(); - maybeInvokeAfterSaveCallback(holder.getSource(), target); - } else if (holder.getModel() instanceof ReplaceOneModel) { + Document target = model.getDocument(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel model) { - Document target = ((ReplaceOneModel) holder.getModel()).getReplacement(); - maybeInvokeAfterSaveCallback(holder.getSource(), target); + Document target = model.getReplacement(); + maybeInvokeAfterSaveCallback(holder.source(), target); } } - private , T> E maybeEmitEvent(E event) { - - if (bulkOperationContext.getEventPublisher() == null) { - return event; - } - - bulkOperationContext.getEventPublisher().publishEvent(event); - return event; + private void publishEvent(MongoMappingEvent event) { + bulkOperationContext.publishEvent(event); } private Object maybeInvokeBeforeConvertCallback(Object value) { - - if (bulkOperationContext.getEntityCallbacks() == null) { - return value; - } - - return 
bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName); + return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName); } private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { - - if (bulkOperationContext.getEntityCallbacks() == null) { - return value; - } - - return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument, - collectionName); + return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName); } private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { - - if (bulkOperationContext.getEntityCallbacks() == null) { - return value; - } - - return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument, - collectionName); - } - - private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { - - BulkWriteOptions options = new BulkWriteOptions(); - - switch (bulkMode) { - case ORDERED: - return options.ordered(true); - case UNORDERED: - return options.ordered(false); - } - - throw new IllegalStateException("BulkMode was null!"); - } - - /** - * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}. - * @param update The {@link Update} to apply - * @param upsert flag to indicate if document should be upserted. - * @return new instance of {@link UpdateOptions}. 
- */ - private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) { - - UpdateOptions options = new UpdateOptions(); - options.upsert(upsert); - - if (update.hasArrayFilters()) { - List list = new ArrayList<>(update.getArrayFilters().size()); - for (ArrayFilter arrayFilter : update.getArrayFilters()) { - list.add(arrayFilter.asDocument()); - } - options.arrayFilters(list); - } - - filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); - return options; + return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName); } /** - * {@link BulkOperationContext} holds information about - * {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to + * {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to * {@link QueryMapper} and {@link UpdateMapper}. * * @author Christoph Strobl * @since 2.0 */ - static final class BulkOperationContext { - - private final BulkMode bulkMode; - private final Optional> entity; - private final QueryMapper queryMapper; - private final UpdateMapper updateMapper; - private final ApplicationEventPublisher eventPublisher; - private final EntityCallbacks entityCallbacks; - - BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional> entity, - QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher, - EntityCallbacks entityCallbacks) { - - this.bulkMode = bulkMode; - this.entity = entity; - this.queryMapper = queryMapper; - this.updateMapper = updateMapper; - this.eventPublisher = eventPublisher; - this.entityCallbacks = entityCallbacks; - } - - public BulkMode getBulkMode() { - return this.bulkMode; - } - - public Optional> getEntity() { - return this.entity; - } - - public QueryMapper getQueryMapper() { - return this.queryMapper; - } - - public UpdateMapper 
getUpdateMapper() { - return this.updateMapper; - } + record BulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable EntityCallbacks entityCallbacks) { - public ApplicationEventPublisher getEventPublisher() { - return this.eventPublisher; + public boolean skipEntityCallbacks() { + return entityCallbacks == null; } - public EntityCallbacks getEntityCallbacks() { - return this.entityCallbacks; + public boolean skipEventPublishing() { + return eventPublisher == null; } - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - BulkOperationContext that = (BulkOperationContext) o; + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, String collectionName) { - if (bulkMode != that.bulkMode) - return false; - if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) { - return false; - } - if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) { - return false; - } - if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) { - return false; - } - if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) { - return false; + if (skipEntityCallbacks()) { + return entity; } - return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks); - } - - @Override - public int hashCode() { - int result = bulkMode != null ? 
bulkMode.hashCode() : 0; - result = 31 * result + ObjectUtils.nullSafeHashCode(entity); - result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper); - result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper); - result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher); - result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks); - return result; - } - public String toString() { - return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity=" - + this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper() - + ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")"; + return entityCallbacks.callback(callbackType, entity, collectionName); } - } - - /** - * Value object chaining together an actual source with its {@link WriteModel} representation. - * - * @since 2.2 - * @author Christoph Strobl - */ - private static final class SourceAwareWriteModelHolder { - - private final Object source; - private final WriteModel model; - SourceAwareWriteModelHolder(Object source, WriteModel model) { + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, Document document, + String collectionName) { - this.source = source; - this.model = model; - } - - public Object getSource() { - return this.source; - } + if (skipEntityCallbacks()) { + return entity; + } - public WriteModel getModel() { - return this.model; + return entityCallbacks.callback(callbackType, entity, document, collectionName); } - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o; + public void publishEvent(ApplicationEvent event) { - if (!ObjectUtils.nullSafeEquals(this.source, that.source)) { - return false; + if (skipEventPublishing()) { + return; } - return 
ObjectUtils.nullSafeEquals(this.model, that.model); - } - @Override - public int hashCode() { - int result = ObjectUtils.nullSafeHashCode(model); - result = 31 * result + ObjectUtils.nullSafeHashCode(source); - return result; - } - - public String toString() { - return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model=" - + this.getModel() + ")"; + eventPublisher.publishEvent(event); } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index 010f494f6a..2057e2f046 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,6 +22,7 @@ import org.bson.Document; import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; @@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; @@ -52,7 +54,7 @@ public class DefaultIndexOperations implements IndexOperations { private final QueryMapper mapper; private final @Nullable Class type; - private MongoOperations mongoOperations; + private final MongoOperations mongoOperations; /** * Creates a new {@link DefaultIndexOperations}. 
@@ -83,9 +85,9 @@ public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collec public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, @Nullable Class type) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); - Assert.notNull(collectionName, "Collection name can not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); + Assert.notNull(collectionName, "Collection name can not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); this.collectionName = collectionName; this.mapper = queryMapper; @@ -103,8 +105,8 @@ public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collec */ public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); this.mongoOperations = mongoOperations; this.mapper = new QueryMapper(mongoOperations.getConverter()); @@ -112,11 +114,8 @@ public DefaultIndexOperations(MongoOperations mongoOperations, String collection this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) - */ - public String ensureIndex(final IndexDefinition indexDefinition) { + @Override + public String ensureIndex(IndexDefinition indexDefinition) { return execute(collection -> { @@ -150,11 +149,8 @@ private MongoPersistentEntity lookupPersistentEntity(@Nullable Class entit return null; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String) - */ - public void dropIndex(final String name) { + @Override + public void dropIndex(String name) { execute(collection -> { collection.dropIndex(name); @@ -163,18 +159,27 @@ public void dropIndex(final String name) { } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes() - */ + @Override + public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + Document result = mongoOperations + .execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))); + + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + } + + @Override public void dropAllIndexes() { dropIndex("*"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo() - */ + @Override public List getIndexInfo() { return execute(new CollectionCallback>() { @@ -188,7 +193,8 @@ public List doInCollection(MongoCollection collection) private List getIndexData(MongoCursor cursor) { - List indexInfoList = new ArrayList<>(); + int available = cursor.available(); + List indexInfoList = available > 0 ? 
new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { @@ -205,11 +211,7 @@ private List getIndexData(MongoCursor cursor) { @Nullable public T execute(CollectionCallback callback) { - Assert.notNull(callback, "CollectionCallback must not be null!"); - - if (type != null) { - return mongoOperations.execute(type, callback); - } + Assert.notNull(callback, "CollectionCallback must not be null"); return mongoOperations.execute(collectionName, callback); } @@ -226,7 +228,8 @@ private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document source mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); } - private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity entity) { + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { return ops; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java index 4ca69116df..e2471dbb14 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -42,10 +42,6 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider { this.mapper = mapper; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String) - */ @Override public IndexOperations indexOps(String collectionName, Class type) { return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java new file mode 100644 index 0000000000..59b7ccd63e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java @@ -0,0 +1,393 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import 
com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Default implementation for {@link ReactiveBulkOperations}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.1 + */ +class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + private final ReactiveBulkOperationContext bulkOperationContext; + private final List> models = new ArrayList<>(); + + private @Nullable WriteConcern defaultWriteConcern; + + private BulkWriteOptions bulkOptions; + + /** + * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and + * {@link ReactiveBulkOperationContext}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param bulkOperationContext must not be {@literal null}. + */ + DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName, + ReactiveBulkOperationContext bulkOperationContext) { + + super(collectionName); + + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + this.bulkOperationContext = bulkOperationContext; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + + /** + * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}. + * + * @param defaultWriteConcern can be {@literal null}. 
+ */ + void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { + this.defaultWriteConcern = defaultWriteConcern; + } + + @Override + public ReactiveBulkOperations insert(Object document) { + + Assert.notNull(document, "Document must not be null"); + + this.models.add(Mono.just(document).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it))))); + + return this; + } + + @Override + public ReactiveBulkOperations insert(List documents) { + + Assert.notNull(documents, "Documents must not be null"); + + documents.forEach(this::insert); + + return this; + } + + @Override + public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, false); + return this; + } + + @Override + public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, true); + return this; + } + + @Override + public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) { + return update(query, update, true, true); + } + + @Override + public ReactiveBulkOperations remove(Query query) { + + Assert.notNull(query, "Query must not be null"); + + DeleteOptions deleteOptions = new DeleteOptions(); + query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + + this.models.add(Mono.just(query) + .map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions)))); + + return this; + } + + @Override + public ReactiveBulkOperations remove(List removes) { + + Assert.notNull(removes, "Removals must not be null"); + + for (Query query : 
removes) { + remove(query); + } + + return this; + } + + @Override + public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + this.models.add(Mono.just(replacement).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, + new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(it), replaceOptions)))); + + return this; + } + + @Override + public Mono execute() { + + try { + return mongoOperations.execute(collectionName, this::bulkWriteTo).next(); + } finally { + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private Mono bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + Flux concat = Flux.concat(models).flatMapSequential(it -> { + + if (it.model() instanceof InsertOneModel iom) { + + Document target = iom.getDocument(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom))); + } else if (it.model() instanceof ReplaceOneModel rom) { + + Document target = rom.getReplacement(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom))); 
+ } + + return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model()))); + }); + + MongoCollection theCollection = collection; + return concat.collectList().flatMap(it -> { + + return Mono + .from(theCollection + .bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions)) + .doOnSuccess(state -> { + it.forEach(this::maybeEmitAfterSaveEvent); + }).flatMap(state -> { + List> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList()); + + return Flux.concat(monos).then(Mono.just(state)); + }); + }); + } + + /** + * Performs update and upsert bulk operations. + * + * @param query the {@link Query} to determine documents to update. + * @param update the {@link Update} to perform, must not be {@literal null}. + * @param upsert whether to upsert. + * @param multi whether to issue a multi-update. + * @return the {@link BulkOperations} with the update registered. + */ + private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); + + this.models.add(Mono.just(update).map(it -> { + if (multi) { + return new SourceAwareWriteModelHolder(update, + new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + } + return new SourceAwareWriteModelHolder(update, + new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + })); + + return this; + } + + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } + + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } + + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } + + @Override + protected 
Optional> entity() { + return bulkOperationContext.entity(); + } + + private Document getMappedObject(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + Document sink = new Document(); + + mongoOperations.getConverter().write(source, sink); + return sink; + } + + private Mono maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } + return Mono.just(holder.source()); + } + + private Mono maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName); + } + + private Mono maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName); + } + + private Mono maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName); + } + + /** + * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as + * references to {@link QueryMapper} and {@link UpdateMapper}. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ + record ReactiveBulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable ReactiveEntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java index 832d91a1a7..8e78f421f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,6 +22,7 @@ import java.util.Optional; import org.bson.Document; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; @@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.client.model.IndexOptions; @@ -76,9 +78,9 @@ public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, S private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, QueryMapper queryMapper, Optional> type) { - Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); - Assert.notNull(collectionName, "Collection must not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); + Assert.notNull(collectionName, "Collection must not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; @@ -86,11 +88,8 @@ private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) - */ - public Mono ensureIndex(final IndexDefinition indexDefinition) { + @Override + public Mono ensureIndex(IndexDefinition indexDefinition) { return mongoOperations.execute(collectionName, collection -> { @@ -108,6 +107,23 @@ public Mono ensureIndex(final IndexDefinition indexDefinition) { }).next(); } + @Override + public Mono alterIndex(String name, 
org.springframework.data.mongodb.core.index.IndexOptions options) { + + return mongoOperations.execute(db -> { + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))) + .doOnNext(result -> { + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + }); + }).then(); + } + @Nullable private MongoPersistentEntity lookupPersistentEntity(String collection) { @@ -119,26 +135,17 @@ private MongoPersistentEntity lookupPersistentEntity(String collection) { .orElse(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String) - */ - public Mono dropIndex(final String name) { + @Override + public Mono dropIndex(String name) { return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes() - */ + @Override public Mono dropAllIndexes() { return dropIndex("*"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo() - */ + @Override public Flux getIndexInfo() { return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) // @@ -157,7 +164,8 @@ private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document source queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); } - private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity entity) { + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable 
MongoPersistentEntity entity) { if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { return ops; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java index 309b064310..b236b4df28 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2014-2021 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,9 +29,9 @@ import org.bson.Document; import org.bson.types.ObjectId; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; @@ -65,41 +65,29 @@ class DefaultScriptOperations implements ScriptOperations { */ public DefaultScriptOperations(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript) - */ @Override public NamedMongoScript register(ExecutableMongoScript script) { return register(new 
NamedMongoScript(generateScriptName(), script)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript) - */ @Override public NamedMongoScript register(NamedMongoScript script) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); mongoOperations.save(script, SCRIPT_COLLECTION_NAME); return script; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[]) - */ @Override - public Object execute(final ExecutableMongoScript script, final Object... args) { + public Object execute(ExecutableMongoScript script, Object... args) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); return mongoOperations.execute(new DbCallback() { @@ -115,14 +103,10 @@ public Object doInDB(MongoDatabase db) throws MongoException, DataAccessExceptio }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[]) - */ @Override - public Object call(final String scriptName, final Object... args) { + public Object call(String scriptName, Object... 
args) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); return mongoOperations.execute(new DbCallback() { @@ -135,22 +119,15 @@ public Object doInDB(MongoDatabase db) throws MongoException, DataAccessExceptio }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String) - */ @Override public boolean exists(String scriptName) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); - return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME); + return mongoOperations.exists(query(where(FieldName.ID.name()).is(scriptName)), NamedMongoScript.class, + SCRIPT_COLLECTION_NAME); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames() - */ @Override public Set getScriptNames() { @@ -175,7 +152,7 @@ private Object[] convertScriptArgs(boolean quote, Object... args) { return args; } - List convertedValues = new ArrayList(args.length); + List convertedValues = new ArrayList<>(args.length); for (Object arg : args) { convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java index 037d54b213..8b4de14e05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java index d0e1f81143..54f85051fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ /** * An interface used by {@link MongoTemplate} for processing documents returned from a MongoDB query on a per-document - * basis. Implementations of this interface perform the actual work of prcoessing each document but don't need to worry + * basis. Implementations of this interface perform the actual work of processing each document but don't need to worry * about exception handling. {@link MongoException}s will be caught and translated by the calling MongoTemplate An * DocumentCallbackHandler is typically stateful: It keeps the result state within the object, to be available later for * later inspection. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..601b6898b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + public static final String RANGE = "Range"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java new file mode 100644 index 0000000000..94352ad65c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java @@ -0,0 +1,60 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.lang.Nullable; + +/** + * Delegate class to encapsulate lifecycle event configuration and publishing. + * + * @author Mark Paluch + * @since 4.0 + * @see ApplicationEventPublisher + */ +class EntityLifecycleEventDelegate { + + private @Nullable ApplicationEventPublisher publisher; + private boolean eventsEnabled = true; + + public void setPublisher(@Nullable ApplicationEventPublisher publisher) { + this.publisher = publisher; + } + + public boolean isEventsEnabled() { + return eventsEnabled; + } + + public void setEventsEnabled(boolean eventsEnabled) { + this.eventsEnabled = eventsEnabled; + } + + /** + * Publish an application event if event publishing is enabled. + * + * @param event the application event. 
+ */ + public void publishEvent(Object event) { + + if (canPublishEvent()) { + publisher.publishEvent(event); + } + } + + private boolean canPublishEvent() { + return publisher != null && eventsEnabled; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index f2daf0287d..38269787cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,33 +15,68 @@ */ package org.springframework.data.mongodb.core; +import java.time.Duration; import java.util.Collection; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; +import org.bson.BsonNull; import org.bson.Document; import org.springframework.core.convert.ConversionService; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.expression.ValueEvaluationContext; import org.springframework.data.mapping.IdentifierAccessor; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import 
org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.TargetAware; +import org.springframework.data.util.Optionals; +import org.springframework.expression.spel.support.SimpleEvaluationContext; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import 
org.springframework.util.ClassUtils; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.ValidationOptions; /** * Common operations performed on an entity in the context of it's mapping metadata. @@ -49,18 +84,47 @@ * @author Oliver Gierke * @author Mark Paluch * @author Christoph Strobl + * @author Ben Foster + * @author Ross Lawley * @since 2.1 * @see MongoTemplate * @see ReactiveMongoTemplate */ class EntityOperations { - private static final String ID_FIELD = "_id"; + private static final String ID_FIELD = FieldName.ID.name(); private final MappingContext, MongoPersistentProperty> context; + private final QueryMapper queryMapper; + + private final EntityProjectionIntrospector introspector; + + private final MongoJsonSchemaMapper schemaMapper; + + private @Nullable Environment environment; - EntityOperations(MappingContext, MongoPersistentProperty> context) { + EntityOperations(MongoConverter converter) { + this(converter, new QueryMapper(converter)); + } + + EntityOperations(MongoConverter converter, QueryMapper queryMapper) { + this(converter, converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory(), + queryMapper); + } + + EntityOperations(MongoConverter converter, + MappingContext, MongoPersistentProperty> context, + CustomConversions conversions, ProjectionFactory projectionFactory, QueryMapper queryMapper) { this.context = context; + this.queryMapper = queryMapper; + this.introspector = EntityProjectionIntrospector.create(projectionFactory, + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and(((target, underlyingType) -> 
!conversions.isSimpleType(target))), + context); + this.schemaMapper = new MongoJsonSchemaMapper(converter); + if (converter instanceof EnvironmentCapable environmentCapable) { + this.environment = environmentCapable.getEnvironment(); + } } /** @@ -72,17 +136,21 @@ class EntityOperations { @SuppressWarnings({ "unchecked", "rawtypes" }) Entity forEntity(T entity) { - Assert.notNull(entity, "Bean must not be null!"); + Assert.notNull(entity, "Bean must not be null"); + + if (entity instanceof TargetAware targetAware) { + return new SimpleMappedEntity((Map) targetAware.getTarget(), this); + } if (entity instanceof String) { - return new UnmappedEntity(parse(entity.toString())); + return new UnmappedEntity(parse(entity.toString()), this); } if (entity instanceof Map) { - return new SimpleMappedEntity((Map) entity); + return new SimpleMappedEntity((Map) entity, this); } - return MappedEntity.of(entity, context); + return MappedEntity.of(entity, context, this); } /** @@ -95,18 +163,18 @@ Entity forEntity(T entity) { @SuppressWarnings({ "unchecked", "rawtypes" }) AdaptibleEntity forEntity(T entity, ConversionService conversionService) { - Assert.notNull(entity, "Bean must not be null!"); - Assert.notNull(conversionService, "ConversionService must not be null!"); + Assert.notNull(entity, "Bean must not be null"); + Assert.notNull(conversionService, "ConversionService must not be null"); if (entity instanceof String) { - return new UnmappedEntity(parse(entity.toString())); + return new UnmappedEntity(parse(entity.toString()), this); } if (entity instanceof Map) { - return new SimpleMappedEntity((Map) entity); + return new SimpleMappedEntity((Map) entity, this); } - return AdaptibleMappedEntity.of(entity, context, conversionService); + return AdaptibleMappedEntity.of(entity, context, conversionService, this); } /** @@ -131,10 +199,17 @@ public String determineCollectionName(@Nullable Class entityClass) { if (entityClass == null) { throw new 
InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); + "No class parameter provided, entity collection can't be determined"); } - return context.getRequiredPersistentEntity(entityClass).getCollection(); + MongoPersistentEntity persistentEntity = context.getPersistentEntity(entityClass); + + if (persistentEntity == null) { + throw new MappingException(String.format( + "Cannot determine collection name from type '%s'. Is it a store native type?", entityClass.getName())); + } + + return persistentEntity.getCollection(); } public Query getByIdInQuery(Collection entities) { @@ -161,7 +236,7 @@ public Query getByIdInQuery(Collection entities) { */ public String getIdPropertyName(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); MongoPersistentEntity persistentEntity = context.getPersistentEntity(type); @@ -200,12 +275,12 @@ private static Document parse(String source) { try { return Document.parse(source); } catch (org.bson.json.JsonParseException o_O) { - throw new MappingException("Could not parse given String to save into a JSON document!", o_O); + throw new MappingException("Could not parse given String to save into a JSON document", o_O); } catch (RuntimeException o_O) { // legacy 3.x exception if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) { - throw new MappingException("Could not parse given String to save into a JSON document!", o_O); + throw new MappingException("Could not parse given String to save into a JSON document", o_O); } throw o_O; } @@ -218,17 +293,120 @@ public TypedOperations forType(@Nullable Class entityClass) { MongoPersistentEntity entity = context.getPersistentEntity(entityClass); if (entity != null) { - return new TypedEntityOperations(entity); + return new TypedEntityOperations(entity, environment); } } return UntypedOperations.instance(); } + /** + * Introspect the given {@link Class result type} in 
the context of the {@link Class entity type} whether the returned + * type is a projection and what property paths are participating in the projection. + * + * @param resultType the type to project on. Must not be {@literal null}. + * @param entityType the source domain type. Must not be {@literal null}. + * @return the introspection result. + * @since 3.4 + * @see EntityProjectionIntrospector#introspect(Class, Class) + */ + public EntityProjection introspectProjection(Class resultType, Class entityType) { + + MongoPersistentEntity persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType); + if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) { + return (EntityProjection) EntityProjection.nonProjecting(resultType); + } + return introspector.introspect(resultType, entityType); + } + + /** + * Convert {@link CollectionOptions} to {@link CreateCollectionOptions} using {@link Class entityType} to obtain + * mapping metadata. 
+ * + * @param collectionOptions + * @param entityType + * @return + * @since 3.4 + */ + public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, + Class entityType) { + + Optional collation = Optionals.firstNonEmpty( + () -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> forType(entityType).getCollation());// + + CreateCollectionOptions result = new CreateCollectionOptions(); + collation.map(Collation::toMongoCollation).ifPresent(result::collation); + + if (collectionOptions == null) { + return result; + } + + collectionOptions.getCapped().ifPresent(result::capped); + collectionOptions.getSize().ifPresent(result::sizeInBytes); + collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); + collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); + + collectionOptions.getValidationOptions().ifPresent(it -> { + + ValidationOptions validationOptions = new ValidationOptions(); + + it.getValidationAction().ifPresent(validationOptions::validationAction); + it.getValidationLevel().ifPresent(validationOptions::validationLevel); + + it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); + + result.validationOptions(validationOptions); + }); + + collectionOptions.getTimeSeriesOptions().map(forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + it.getTimeField()); + + if (StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + if (!it.getExpireAfter().isNegative()) { + result.expireAfter(it.getExpireAfter().toSeconds(), TimeUnit.SECONDS); + } + + result.timeSeriesOptions(options); + }); + 
+ collectionOptions.getChangeStreamOptions() // + .map(CollectionOptions.CollectionChangeStreamOptions::getPreAndPostImages) // + .map(ChangeStreamPreAndPostImagesOptions::new) // + .ifPresent(result::changeStreamPreAndPostImagesOptions); + + collectionOptions.getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .filter(Predicate.not(Document::isEmpty)) // + .ifPresent(result::encryptedFields); + + return result; + } + + private Document getMappedValidator(Validator validator, Class domainType) { + + Document validationRules = validator.toDocument(); + + if (validationRules.containsKey("$jsonSchema")) { + return schemaMapper.mapSchema(validationRules, domainType); + } + + return queryMapper.getMappedObject(validationRules, context.getPersistentEntity(domainType)); + } + /** * A representation of information about an entity. * * @author Oliver Gierke + * @author Christoph Strobl * @since 2.1 */ interface Entity { @@ -247,6 +425,16 @@ interface Entity { */ Object getId(); + /** + * Returns the property value for {@code key}. + * + * @param key + * @return + * @since 4.1 + */ + @Nullable + Object getPropertyValue(String key); + /** * Returns the {@link Query} to find the entity by its identifier. * @@ -317,6 +505,15 @@ default boolean isVersionedEntity() { * @since 2.1.2 */ boolean isNew(); + + /** + * @param sortObject + * @return + * @since 4.1 + * @throws IllegalStateException if a sort key yields {@literal null}. + */ + Map extractKeys(Document sortObject, Class sourceType); + } /** @@ -338,7 +535,7 @@ interface AdaptibleEntity extends Entity { T populateIdIfNecessary(@Nullable Object id); /** - * Initializes the version property of the of the current entity if available. + * Initializes the version property of the current entity if available. * * @return the entity with the version property updated if available. 
*/ @@ -364,42 +561,33 @@ interface AdaptibleEntity extends Entity { private static class UnmappedEntity> implements AdaptibleEntity { private final T map; + private final EntityOperations entityOperations; - protected UnmappedEntity(T map) { + protected UnmappedEntity(T map, EntityOperations entityOperations) { this.map = map; + this.entityOperations = entityOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName() - */ @Override public String getIdFieldName() { return ID_FIELD; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId() - */ @Override public Object getId() { - return map.get(ID_FIELD); + return getPropertyValue(ID_FIELD); + } + + @Override + public Object getPropertyValue(String key) { + return map.get(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery() - */ @Override public Query getByIdQuery() { return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#populateIdIfNecessary(java.lang.Object) - */ @Nullable @Override public T populateIdIfNecessary(@Nullable Object id) { @@ -409,90 +597,96 @@ public T populateIdIfNecessary(@Nullable Object id) { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion() - */ @Override public Query getQueryForVersion() { - throw new MappingException("Cannot query for version on plain Documents!"); + throw new MappingException("Cannot query for version on plain Documents"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter) - */ @Override public MappedDocument 
toMappedDocument(MongoWriter writer) { - return MappedDocument.of(map instanceof Document // - ? (Document) map // + return MappedDocument.of(map instanceof Document document // + ? document // : new Document(map)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#initializeVersionProperty() - */ @Override public T initializeVersionProperty() { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#getVersion() - */ @Override @Nullable public Number getVersion() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#incrementVersion() - */ @Override public T incrementVersion() { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean() - */ @Override public T getBean() { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew() - */ @Override public boolean isNew() { return map.get(ID_FIELD) != null; } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(ID_FIELD, getId()); + } + + for (String key : sortObject.keySet()) { + + Object value = resolveValue(key, sourceEntity); + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object resolveValue(String key, @Nullable MongoPersistentEntity sourceEntity) { + + if (sourceEntity == null) { + return 
BsonUtils.resolveValue(map, key); + } + PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = entityOperations.context + .getPersistentPropertyPath(from); + return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(MongoPersistentProperty::getFieldName)); + } } private static class SimpleMappedEntity> extends UnmappedEntity { - protected SimpleMappedEntity(T map) { - super(map); + protected SimpleMappedEntity(T map, EntityOperations entityOperations) { + super(map, entityOperations); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter) - */ @Override @SuppressWarnings("unchecked") public MappedDocument toMappedDocument(MongoWriter writer) { T bean = getBean(); - bean = (T) (bean instanceof Document // - ? (Document) bean // + bean = (T) (bean instanceof Document document// + ? 
document // : new Document(bean)); Document document = new Document(); writer.write(bean, document); @@ -506,52 +700,48 @@ private static class MappedEntity implements Entity { private final MongoPersistentEntity entity; private final IdentifierAccessor idAccessor; private final PersistentPropertyAccessor propertyAccessor; + private final EntityOperations entityOperations; protected MappedEntity(MongoPersistentEntity entity, IdentifierAccessor idAccessor, - PersistentPropertyAccessor propertyAccessor) { + PersistentPropertyAccessor propertyAccessor, EntityOperations entityOperations) { this.entity = entity; this.idAccessor = idAccessor; this.propertyAccessor = propertyAccessor; + this.entityOperations = entityOperations; } private static MappedEntity of(T bean, - MappingContext, MongoPersistentProperty> context) { + MappingContext, MongoPersistentProperty> context, + EntityOperations entityOperations) { MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); - return new MappedEntity<>(entity, identifierAccessor, propertyAccessor); + return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName() - */ @Override public String getIdFieldName() { return entity.getRequiredIdProperty().getFieldName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId() - */ @Override public Object getId() { return idAccessor.getRequiredIdentifier(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery() - */ + @Override + public Object getPropertyValue(String key) { + return 
propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key)); + } + @Override public Query getByIdQuery() { if (!entity.hasIdProperty()) { - throw new MappingException("No id property found for object of type " + entity.getType() + "!"); + throw new MappingException("No id property found for object of type " + entity.getType()); } MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); @@ -559,10 +749,6 @@ public Query getByIdQuery() { return Query.query(Criteria.where(idProperty.getName()).is(getId())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion(java.lang.Object) - */ @Override public Query getQueryForVersion() { @@ -573,10 +759,6 @@ public Query getQueryForVersion() { .and(versionProperty.getName()).is(getVersion())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter) - */ @Override public MappedDocument toMappedDocument(MongoWriter writer) { @@ -592,10 +774,6 @@ public MappedDocument toMappedDocument(MongoWriter writer) { return MappedDocument.of(document); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.Entity#assertUpdateableIdIfNotSet() - */ public void assertUpdateableIdIfNotSet() { if (!entity.hasIdProperty()) { @@ -611,47 +789,85 @@ public void assertUpdateableIdIfNotSet() { if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), entity.getType().getName())); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#isVersionedEntity() - */ @Override public 
boolean isVersionedEntity() { return entity.hasVersionProperty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getVersion() - */ @Override @Nullable public Object getVersion() { return propertyAccessor.getProperty(entity.getRequiredVersionProperty()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean() - */ @Override public T getBean() { return propertyAccessor.getBean(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew() - */ @Override public boolean isNew() { return entity.isNew(propertyAccessor.getBean()); } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(entity.getRequiredIdProperty().getName(), getId()); + } + + for (String key : sortObject.keySet()) { + + Object value; + if (key.indexOf('.') != -1) { + + // follow the path across nested levels. + // TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document. 
+ value = getNestedPropertyValue(key); + } else { + value = getPropertyValue(key); + } + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object getNestedPropertyValue(String key) { + + String[] segments = key.split("\\."); + Entity currentEntity = this; + Object currentValue = BsonNull.VALUE; + + for (int i = 0; i < segments.length; i++) { + + String segment = segments[i]; + currentValue = currentEntity.getPropertyValue(segment); + + if (i < segments.length - 1) { + currentEntity = entityOperations.forEntity(currentValue); + } + } + + return currentValue != null ? currentValue : BsonNull.VALUE; + } } private static class AdaptibleMappedEntity extends MappedEntity implements AdaptibleEntity { @@ -661,9 +877,9 @@ private static class AdaptibleMappedEntity extends MappedEntity implements private final IdentifierAccessor identifierAccessor; private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccessor identifierAccessor, - ConvertingPropertyAccessor propertyAccessor) { + ConvertingPropertyAccessor propertyAccessor, EntityOperations entityOperations) { - super(entity, identifierAccessor, propertyAccessor); + super(entity, identifierAccessor, propertyAccessor, entityOperations); this.entity = entity; this.propertyAccessor = propertyAccessor; @@ -672,20 +888,16 @@ private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccesso private static AdaptibleEntity of(T bean, MappingContext, MongoPersistentProperty> context, - ConversionService conversionService) { + ConversionService conversionService, EntityOperations entityOperations) { MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); 
return new AdaptibleMappedEntity<>(entity, identifierAccessor, - new ConvertingPropertyAccessor<>(propertyAccessor, conversionService)); + new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#populateIdIfNecessary(java.lang.Object) - */ @Nullable @Override public T populateIdIfNecessary(@Nullable Object id) { @@ -707,10 +919,6 @@ public T populateIdIfNecessary(@Nullable Object id) { return propertyAccessor.getBean(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MappedEntity#getVersion() - */ @Override @Nullable public Number getVersion() { @@ -720,10 +928,6 @@ public Number getVersion() { return propertyAccessor.getProperty(versionProperty, Number.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#initializeVersionProperty() - */ @Override public T initializeVersionProperty() { @@ -738,10 +942,6 @@ public T initializeVersionProperty() { return propertyAccessor.getBean(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#incrementVersion() - */ @Override public T incrementVersion() { @@ -778,6 +978,32 @@ interface TypedOperations { * @return */ Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. + * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); + + /** + * @return the name of the id field. 
+ * @since 4.1 + */ + default String getIdKeyName() { + return ID_FIELD; + } } /** @@ -795,19 +1021,11 @@ public static TypedOperations instance() { return (TypedOperations) INSTANCE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation() - */ @Override public Optional getCollation() { return Optional.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query) - */ @Override public Optional getCollation(Query query) { @@ -817,6 +1035,16 @@ public Optional getCollation(Query query) { return query.getCollation(); } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } } /** @@ -828,23 +1056,19 @@ static class TypedEntityOperations implements TypedOperations { private final MongoPersistentEntity entity; - protected TypedEntityOperations(MongoPersistentEntity entity) { + @Nullable private final Environment environment; + + protected TypedEntityOperations(MongoPersistentEntity entity, @Nullable Environment environment) { + this.entity = entity; + this.environment = environment; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation() - */ @Override public Optional getCollation() { return Optional.ofNullable(entity.getCollation()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query) - */ @Override public Optional getCollation(Query query) { @@ -854,6 +1078,99 @@ public Optional getCollation(Query query) { return Optional.ofNullable(entity.getCollation()); } - } + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions 
collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + + options = options.metaField(timeSeries.metaField()); + } + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + + if (StringUtils.hasText(timeSeries.expireAfter())) { + + Duration timeout = computeIndexTimeout(timeSeries.expireAfter(), getEvaluationContextForEntity(entity)); + if (!timeout.isNegative()) { + options = options.expireAfter(timeout); + } + } + + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()).expireAfter(source.getExpireAfter()); + } + + @Override + public String getIdKeyName() { + return entity.getIdProperty().getName(); + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty 
persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? persistentProperty.getFieldName() : name; + } + + /** + * Get the {@link ValueEvaluationContext} for a given {@link PersistentEntity entity} the default one. + * + * @param persistentEntity can be {@literal null} + * @return the context to use. + */ + private ValueEvaluationContext getEvaluationContextForEntity(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity instanceof BasicMongoPersistentEntity mongoEntity) { + return mongoEntity.getValueEvaluationContext(null); + } + + return ValueEvaluationContext.of(this.environment, SimpleEvaluationContext.forReadOnlyDataBinding().build()); + } + + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @throws IllegalArgumentException for invalid duration values. + */ + private static Duration computeIndexTimeout(String timeoutValue, ValueEvaluationContext evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java index f2bc624923..67ed188655 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,9 +15,10 @@ */ package org.springframework.data.mongodb.core; +import java.util.stream.Stream; + import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.util.CloseableIterator; /** * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent @@ -88,12 +89,12 @@ interface TerminatingAggregation { /** * Apply pipeline operations as specified and stream all matching elements.
- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} * - * @return a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. - * Never {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ - CloseableIterator stream(); + Stream stream(); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java index c45ad0404d..ca5aa7a513 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,10 +15,11 @@ */ package org.springframework.data.mongodb.core; +import java.util.stream.Stream; + import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.util.CloseableIterator; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -37,14 +38,10 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableAggregationSupport<>(template, domainType, null, null); } @@ -69,45 +66,29 @@ public ExecutableAggregationSupport(MongoTemplate template, Class domainType, this.collection = collection; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithAggregation#by(org.springframework.data.mongodb.core.aggregation.Aggregation) - */ @Override public TerminatingAggregation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, 
"Aggregation must not be null"); return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#all() - */ @Override public AggregationResults all() { return template.aggregate(aggregation, getCollectionName(aggregation), domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#stream() - */ @Override - public CloseableIterator stream() { + public Stream stream() { return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType); } @@ -117,9 +98,7 @@ private String getCollectionName(Aggregation aggregation) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { return template.getCollectionName(typedAggregation.getInputType()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index d67212bdc6..3358ff2b17 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,9 @@ import java.util.stream.Stream; import org.springframework.dao.DataAccessException; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; @@ -118,18 +121,34 @@ default Optional first() { /** * Stream all matching elements. * - * @return a {@link Stream} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. Never - * {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ Stream stream(); /** - * Get the number of matching elements. - *

- * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation - * execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard, - * session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link MongoOperations#estimatedCount(String)} for empty queries instead. + * Return a window of elements either starting or resuming at + * {@link org.springframework.data.domain.ScrollPosition}. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a window of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Window scroll(ScrollPosition scrollPosition); + + /** + * Get the number of matching elements.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications + * needs use {@link MongoOperations#estimatedCount(String)} for empty queries instead. * * @return total number of matching elements. */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java index c196babfa0..4e6c3547c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,12 +20,13 @@ import java.util.stream.Stream; import org.bson.Document; + import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.data.util.CloseableIterator; -import org.springframework.data.util.StreamUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -51,14 +52,10 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override public ExecutableFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY); } @@ -74,11 +71,11 @@ static class ExecutableFindSupport private final MongoTemplate template; private final Class domainType; private final Class returnType; - @Nullable private final String collection; + private final @Nullable String collection; private final Query query; - ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, - String collection, Query query) { + ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, @Nullable String collection, + Query query) { this.template = template; this.domainType = domainType; this.returnType = returnType; @@ -86,46 +83,30 @@ static class ExecutableFindSupport this.query = query; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection#as(Class) - */ @Override public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#oneValue() - */ @Override public T oneValue() { @@ -136,16 +117,12 @@ public T oneValue() { } if (result.size() > 1) { - throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1); + throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1); } return result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#firstValue() - */ @Override public T firstValue() { @@ -154,60 +131,41 @@ public T firstValue() { return ObjectUtils.isEmpty(result) ? 
null : result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#all() - */ @Override public List all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#stream() - */ @Override public Stream stream() { - return StreamUtils.createStreamFromIterator(doStream()); + return doStream(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ @Override public TerminatingFindNear near(NearQuery nearQuery) { return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#count() - */ @Override public long count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#exists() - */ @Override public boolean exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindDistinct#distinct(java.lang.String) - */ @SuppressWarnings("unchecked") @Override public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport(this, field); } @@ -217,8 +175,8 @@ private List doFind(@Nullable CursorPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - return 
template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, - getCursorPreparer(query, preparer)); + return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType, + returnType, getCursorPreparer(query, preparer)); } private List doFindDistinct(String field) { @@ -227,7 +185,7 @@ private List doFindDistinct(String field) { returnType == domainType ? (Class) Object.class : returnType); } - private CloseableIterator doStream() { + private Stream doStream() { return template.doStream(query, domainType, getCollectionName(), returnType); } @@ -248,7 +206,7 @@ private String asString() { * @author Christoph Strobl * @since 2.0 */ - static class DelegatingQueryCursorPreparer implements CursorPreparer { + static class DelegatingQueryCursorPreparer implements SortingQueryCursorPreparer { private final @Nullable CursorPreparer delegate; private Optional limit = Optional.empty(); @@ -257,10 +215,6 @@ static class DelegatingQueryCursorPreparer implements CursorPreparer { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.clientFindIterable) - */ @Override public FindIterable prepare(FindIterable iterable) { @@ -275,9 +229,16 @@ CursorPreparer limit(int limit) { } @Override + @Nullable public ReadPreference getReadPreference() { return delegate.getReadPreference(); } + + @Override + @Nullable + public Document getSortObject() { + return delegate instanceof SortingQueryCursorPreparer sqcp ? 
sqcp.getSortObject() : null; + } } /** @@ -295,35 +256,23 @@ public DistinctOperationSupport(ExecutableFindSupport delegate, String field) this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override @SuppressWarnings("unchecked") public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingDistinct#all() - */ @Override public List all() { return delegate.doFindDistinct(field); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java index 33df93edf4..c2b08c7e59 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java index 3565aad04c..47b7127deb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,14 +40,10 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.coreExecutableInsertOperation#insert(java.lan.Class) - */ @Override public ExecutableInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableInsertSupport<>(template, domainType, null, null); } @@ -71,63 +67,43 @@ static class ExecutableInsertSupport implements ExecutableInsert { this.bulkMode = bulkMode; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#insert(java.lang.Class) - */ @Override public T one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override public Collection all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + 
Assert.notNull(objects, "Objects must not be null"); return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingBulkInsert#bulk(java.util.Collection) - */ @Override public BulkWriteResult bulk(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.bulkOps(bulkMode != null ? bulkMode : BulkMode.ORDERED, domainType, getCollectionName()) .insert(new ArrayList<>(objects)).execute(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override public InsertWithBulkMode inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithBulkMode#withBulkMode(org.springframework.data.mongodb.core.BulkMode) - */ @Override public TerminatingBulkInsert withBulkMode(BulkMode bulkMode) { - Assert.notNull(bulkMode, "BulkMode must not be null!"); + Assert.notNull(bulkMode, "BulkMode must not be null"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java index e3f9601251..2d13ad3ea0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java @@ -1,5 +1,5 @@ /* - * 
Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -187,7 +187,9 @@ interface MapReduceWithProjection extends MapReduceWithQuery { * * @author Christoph Strobl * @since 2.1 + * @deprecated since 4.0 in favor of {@link org.springframework.data.mongodb.core.aggregation}. */ + @Deprecated interface MapReduceWithOptions { /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java index 84e035977f..9f78693540 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -37,7 +37,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio ExecutableMapReduceOperationSupport(MongoTemplate template) { - Assert.notNull(template, "Template must not be null!"); + Assert.notNull(template, "Template must not be null"); this.template = template; } @@ -48,7 +48,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio @Override public ExecutableMapReduceSupport mapReduce(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); } @@ -101,7 +101,7 @@ public List all() { @Override public MapReduceWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -114,7 +114,7 @@ public MapReduceWithProjection inCollection(String collection) { @Override public TerminatingMapReduce matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -127,7 +127,7 @@ public TerminatingMapReduce matching(Query query) { @Override public MapReduceWithQuery as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, reduceFunction, options); @@ -140,7 +140,7 @@ public MapReduceWithQuery as(Class resultType) { @Override public ExecutableMapReduce 
with(MapReduceOptions options) { - Assert.notNull(options, "Options must not be null! Please consider empty MapReduceOptions#options() instead."); + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -153,7 +153,7 @@ public ExecutableMapReduce with(MapReduceOptions options) { @Override public MapReduceWithReduceFunction map(String mapFunction) { - Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!"); + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -166,7 +166,7 @@ public MapReduceWithReduceFunction map(String mapFunction) { @Override public ExecutableMapReduce reduce(String reduceFunction) { - Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!"); + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java index 08024935ae..a10cd0317f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -76,7 +76,7 @@ interface TerminatingRemove { /** * Remove and return all matching documents.
- * NOTE The entire list of documents will be fetched before sending the actual delete commands. + * NOTE: The entire list of documents will be fetched before sending the actual delete commands. * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete * operation. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java index 7cc90b4676..8e84aa7dd6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,14 +41,10 @@ public ExecutableRemoveOperationSupport(MongoTemplate tempate) { this.tempate = tempate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override public ExecutableRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableRemoveSupport<>(tempate, domainType, ALL_QUERY, null); } @@ -71,52 +67,32 @@ public ExecutableRemoveSupport(MongoTemplate template, Class domainType, Quer this.collection = collection; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithCollection#inCollection(java.lang.String) - */ @Override public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#all() - */ @Override public DeleteResult all() { return template.doRemove(getCollectionName(), query, domainType, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#one() - */ @Override public DeleteResult one() { return template.doRemove(getCollectionName(), query, domainType, false); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public List findAndRemove() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java index a8b58669e3..a5c63e9b67 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -87,15 +87,32 @@ default Optional findAndModify() { T findAndModifyValue(); } + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + UpdateResult replaceFirst(); + } + /** * Trigger - * findOneAndReplace + * findOneAndReplace * execution by calling one of the terminating methods. * * @author Mark Paluch * @since 2.1 */ - interface TerminatingFindAndReplace { + interface TerminatingFindAndReplace extends TerminatingReplace { /** * Find, replace and return the first matching document. @@ -243,6 +260,22 @@ interface FindAndModifyWithOptions { TerminatingFindAndModify withOptions(FindAndModifyOptions options); } + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. 
+ * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + /** * Define {@link FindAndReplaceOptions}. * @@ -250,7 +283,7 @@ interface FindAndModifyWithOptions { * @author Christoph Strobl * @since 2.1 */ - interface FindAndReplaceWithOptions extends TerminatingFindAndReplace { + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { /** * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java index 0a57d77458..593d863d39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -40,14 +40,10 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override public ExecutableUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); } @@ -85,128 +81,95 @@ static class ExecutableUpdateSupport this.targetType = targetType; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition) - */ @Override public TerminatingUpdate apply(UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, 
"Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#replaceWith(Object) - */ @Override public FindAndReplaceWithProjection replaceWith(T replacement) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions) - */ @Override public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, options, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); + } + @Override public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new 
ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class) - */ @Override public FindAndReplaceWithOptions as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, resultType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#all() - */ @Override public UpdateResult all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#first() - */ @Override public UpdateResult first() { return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#upsert() - */ @Override public UpdateResult upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndModify#findAndModifyValue() - */ @Override public @Nullable T findAndModifyValue() { @@ -215,10 +178,6 @@ public UpdateResult upsert() { getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndReplace#findAndReplaceValue() - */ @Override public @Nullable T findAndReplaceValue() { @@ -227,6 +186,18 @@ public UpdateResult upsert() { getCollectionName(), targetType); } + @Override + public UpdateResult replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? 
findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + private UpdateResult doUpdate(boolean multi, boolean upsert) { return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java index 30a3ed9e5f..51a2c5b86a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,7 +35,7 @@ public class FindAndModifyOptions { private static final FindAndModifyOptions NONE = new FindAndModifyOptions() { - private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. 
Please use FindAndModifyOptions.options() instead."; + private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed; Please use FindAndModifyOptions.options() instead"; @Override public FindAndModifyOptions returnNew(boolean returnNew) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java index 6122837a27..266a0742c2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,7 +17,7 @@ /** * Options for - * findOneAndReplace. + * findOneAndReplace. *
* Defaults to *
@@ -31,14 +31,13 @@ * @author Christoph Strobl * @since 2.1 */ -public class FindAndReplaceOptions { +public class FindAndReplaceOptions extends ReplaceOptions { private boolean returnNew; - private boolean upsert; private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() { - private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead."; + private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed; Please use FindAndReplaceOptions.options() instead"; @Override public FindAndReplaceOptions returnNew() { @@ -109,7 +108,7 @@ public FindAndReplaceOptions returnNew() { */ public FindAndReplaceOptions upsert() { - this.upsert = true; + super.upsert(); return this; } @@ -122,13 +121,4 @@ public boolean isReturnNew() { return returnNew; } - /** - * Get the bit indicating if to create a new document if not exists. - * - * @return {@literal true} if set. - */ - public boolean isUpsert() { - return upsert; - } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java index 0628db7d9c..625a85950e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -49,7 +49,7 @@ public interface FindPublisherPreparer extends ReadPreferenceAware { FindPublisher prepare(FindPublisher findPublisher); /** - * Apply query specific settings to {@link MongoCollection} and initate a find operation returning a + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a * {@link FindPublisher} via the given {@link Function find} function. * * @param collection must not be {@literal null}. @@ -61,8 +61,8 @@ public interface FindPublisherPreparer extends ReadPreferenceAware { default FindPublisher initiateFind(MongoCollection collection, Function, FindPublisher> find) { - Assert.notNull(collection, "Collection must not be null!"); - Assert.notNull(find, "Find function must not be null!"); + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); if (hasReadPreference()) { collection = collection.withReadPreference(getReadPreference()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java index ff2d8a1be1..906afddd4a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java index 6f17a3026c..654e7d4330 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,7 +39,7 @@ class GeoCommandStatistics { */ private GeoCommandStatistics(Document source) { - Assert.notNull(source, "Source document must not be null!"); + Assert.notNull(source, "Source document must not be null"); this.source = source; } @@ -51,7 +51,7 @@ private GeoCommandStatistics(Document source) { */ public static GeoCommandStatistics from(Document commandResult) { - Assert.notNull(commandResult, "Command result must not be null!"); + Assert.notNull(commandResult, "Command result must not be null"); Object stats = commandResult.get("stats"); return stats == null ? NONE : new GeoCommandStatistics((Document) stats); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java new file mode 100644 index 0000000000..57abe9a529 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java @@ -0,0 +1,129 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; + +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Function object to apply a query hint. Can be an index name or a BSON document. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class HintFunction { + + private static final HintFunction EMPTY = new HintFunction(null); + + private final @Nullable Object hint; + + private HintFunction(@Nullable Object hint) { + this.hint = hint; + } + + /** + * Return an empty hint function. + * + * @return + */ + static HintFunction empty() { + return EMPTY; + } + + /** + * Create a {@link HintFunction} from a {@link Bson document} or {@link String index name}. + * + * @param hint + * @return + */ + static HintFunction from(@Nullable Object hint) { + return new HintFunction(hint); + } + + /** + * Return whether a hint is present. + * + * @return + */ + public boolean isPresent() { + return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson; + } + + /** + * If a hint is not present, returns {@code true}, otherwise {@code false}. + * + * @return {@code true} if a hint is not present, otherwise {@code false}. 
+ */ + public boolean isEmpty() { + return !isPresent(); + } + + /** + * Apply the hint to consumers depending on the hint format if {@link #isPresent() present}. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @param + */ + public void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + return; + } + apply(registryProvider, stringConsumer, bsonConsumer); + } + + /** + * Apply the hint to consumers depending on the hint format. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @return + * @param + */ + public R apply(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + throw new IllegalStateException("No hint present"); + } + + if (hint instanceof Bson bson) { + return bsonConsumer.apply(bson); + } + + if (hint instanceof String hintString) { + + if (BsonUtils.isJsonDocument(hintString)) { + return bsonConsumer.apply(BsonUtils.parse(hintString, registryProvider)); + } + return stringConsumer.apply(hintString); + } + + throw new IllegalStateException( + "Unable to read hint of type %s".formatted(hint != null ? hint.getClass() : "null")); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index e7fae4df5c..f5856100d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,9 +18,11 @@ import java.util.concurrent.TimeUnit; import org.bson.Document; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; @@ -89,7 +91,7 @@ private static Converter getIndexDefinitionIndexO ops = ops.bits((Integer) indexOptions.get("bits")); } if (indexOptions.containsKey("bucketSize")) { - ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); + MongoCompatibilityAdapter.indexOptionsAdapter(ops).setBucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); } if (indexOptions.containsKey("default_language")) { ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); @@ -115,6 +117,14 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } + if (indexOptions.containsKey("wildcardProjection")) { + ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + + if (indexOptions.containsKey("hidden")) { + ops = ops.hidden((Boolean) indexOptions.get("hidden")); + } + return ops; }; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java index 340c11bb99..da4766343a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,6 +20,7 @@ import org.bson.Document; import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.StreamUtils; @@ -33,7 +34,7 @@ */ public class MappedDocument { - private static final String ID_FIELD = "_id"; + private static final String ID_FIELD = FieldName.ID.name(); private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1); private final Document document; @@ -97,6 +98,16 @@ public Document getDocument() { return this.document; } + /** + * Updates the documents {@link #ID_FIELD}. + * + * @param value the {@literal _id} value to set. + * @since 3.4.3 + */ + public void updateId(Object value) { + document.put(ID_FIELD, value); + } + /** * An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been * mapped to the specific domain type. 
@@ -104,7 +115,7 @@ public Document getDocument() { * @author Christoph Strobl * @since 2.2 */ - class MappedUpdate implements UpdateDefinition { + static class MappedUpdate implements UpdateDefinition { private final Update delegate; @@ -112,49 +123,34 @@ class MappedUpdate implements UpdateDefinition { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getUpdateObject() - */ @Override public Document getUpdateObject() { return delegate.getUpdateObject(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#modifies(java.lang.String) - */ @Override public boolean modifies(String key) { return delegate.modifies(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#inc(java.lang.String) - */ @Override public void inc(String version) { delegate.inc(version); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#isIsolated() - */ @Override public Boolean isIsolated() { return delegate.isIsolated(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters() - */ @Override public List getArrayFilters() { return delegate.getArrayFilters(); } + + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java index ecbf8a4f07..bc26dfb68c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. 
+ * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,24 +20,38 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.bson.Document; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.JsonSchemaObject; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import 
org.springframework.data.util.TypeInformation; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain @@ -52,6 +66,8 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; + private final LinkedMultiValueMap> mergeProperties; /** * Create a new instance of {@link MappingMongoJsonSchemaCreator}. @@ -61,27 +77,80 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { @SuppressWarnings("unchecked") MappingMongoJsonSchemaCreator(MongoConverter converter) { - Assert.notNull(converter, "Converter must not be null!"); + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true, new LinkedMultiValueMap<>()); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter, LinkedMultiValueMap> mergeProperties) { + + Assert.notNull(converter, "Converter must not be null"); this.converter = converter; - this.mappingContext = (MappingContext, MongoPersistentProperty>) converter - .getMappingContext(); + this.mappingContext = mappingContext; + this.filter = filter; + this.mergeProperties = mergeProperties; } - /* - * (non-Javadoc) - * org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class) + @Override + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, mergeProperties); + } + 
+ @Override + public PropertySpecifier property(String path) { + return types -> withTypesFor(path, types); + } + + /** + * Specify additional types to be considered when rendering the schema for the given path. + * + * @param path path the path using {@literal dot '.'} notation. + * @param types must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.4 */ + public MongoJsonSchemaCreator withTypesFor(String path, Class... types) { + + LinkedMultiValueMap> clone = mergeProperties.clone(); + for (Class type : types) { + clone.add(path, type); + } + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone); + } + @Override public MongoJsonSchema createSchemaFor(Class type) { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + schemaBuilder.encryptionMetadata(getEncryptionMetadata(entity, encrypted)); + } + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); return schemaBuilder.build(); + } + + private static Document getEncryptionMetadata(MongoPersistentEntity entity, Encrypted encrypted) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + return encryptionMetadata; } private List computePropertiesForEntity(List path, @@ -93,6 +162,14 @@ private List computePropertiesForEntity(List currentPath = new ArrayList<>(path); + String stringPath = 
currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")); + stringPath = StringUtils.hasText(stringPath) ? (stringPath + "." + nested.getName()) : nested.getName(); + if (!filter.test(new PropertyContext(stringPath, nested))) { + if (!mergeProperties.containsKey(stringPath)) { + continue; + } + } + if (path.contains(nested)) { // cycle guard schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), Object.class, false)); @@ -108,27 +185,149 @@ private List computePropertiesForEntity(List path) { + String stringPath = path.stream().map(MongoPersistentProperty::getName).collect(Collectors.joining(".")); MongoPersistentProperty property = CollectionUtils.lastElement(path); boolean required = isRequiredProperty(property); Class rawTargetType = computeTargetType(property); // target type before conversion Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type - if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { - return createObjectSchemaPropertyForEntity(path, property, required); + if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class + || ClassUtils.isAssignable(targetType, rawTargetType)) { + targetType = rawTargetType; + } + + if (!isCollection(property) && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { + if (property.isEntity() || mergeProperties.containsKey(stringPath)) { + List targetProperties = new ArrayList<>(); + + if (property.isEntity()) { + targetProperties.add(createObjectSchemaPropertyForEntity(path, property, required)); + } + if (mergeProperties.containsKey(stringPath)) { + for (Class theType : mergeProperties.get(stringPath)) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + 
mappingContext.getRequiredPersistentEntity(theType)); + + targetProperties.add(createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required)); + } + } + JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next() + : JsonSchemaProperty.merged(targetProperties); + return applyEncryptionDataIfNecessary(property, schemaProperty); + } } String fieldName = computePropertyFieldName(property); - if (property.isCollectionLike()) { - return createSchemaProperty(fieldName, targetType, required); + JsonSchemaProperty schemaProperty; + if (isCollection(property)) { + schemaProperty = createArraySchemaProperty(fieldName, property, required); } else if (property.isMap()) { - return createSchemaProperty(fieldName, Type.objectType(), required); + schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required); } else if (ClassUtils.isAssignable(Enum.class, targetType)) { - return createEnumSchemaProperty(fieldName, targetType, required); + schemaProperty = createEnumSchemaProperty(fieldName, targetType, required); + } else { + schemaProperty = createSchemaProperty(fieldName, targetType, required); + } + + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + + private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property, + boolean required) { + + ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName); + + if (isSpecificType(property)) { + schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty); + } + + return createPotentiallyRequiredSchemaProperty(schemaProperty, required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property, + ArrayJsonSchemaProperty schemaProperty) { + + MongoPersistentEntity persistentEntity = mappingContext + 
.getPersistentEntity(property.getTypeInformation().getRequiredComponentType()); + + if (persistentEntity != null) { + + List nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity); + + if (nestedProperties.isEmpty()) { + return schemaProperty; + } + + return schemaProperty + .items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0]))); + } + + if (ClassUtils.isAssignable(Enum.class, property.getActualType())) { + + List possibleValues = getPossibleEnumValues((Class) property.getActualType()); + + return schemaProperty + .items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues)); } - return createSchemaProperty(fieldName, targetType, required); + return schemaProperty.items(JsonSchemaObject.of(property.getActualType())); + } + + private boolean isSpecificType(MongoPersistentProperty property) { + return !TypeInformation.OBJECT.equals(property.getTypeInformation().getActualType()); + } + + private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property, + JsonSchemaProperty schemaProperty) { + + Encrypted encrypted = property.findAnnotation(Encrypted.class); + if (encrypted == null) { + return schemaProperty; + } + + EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty); + if (StringUtils.hasText(encrypted.algorithm())) { + enc = enc.algorithm(encrypted.algorithm()); + } + if (!ObjectUtils.isEmpty(encrypted.keyId())) { + enc = enc.keys(property.getEncryptionKeyIds()); + } + + Queryable queryable = property.findAnnotation(Queryable.class); + if (queryable == null || !StringUtils.hasText(queryable.queryType())) { + return enc; + } + + QueryCharacteristic characteristic = new QueryCharacteristic() { + + @Override + public String queryType() { + return queryable.queryType(); + } + + @Override + public Document toDocument() { + + Document options = QueryCharacteristic.super.toDocument(); + + if 
(queryable.contentionFactor() >= 0) { + options.put("contention", queryable.contentionFactor()); + } + + if (StringUtils.hasText(queryable.queryAttributes())) { + options.putAll(Document.parse(queryable.queryAttributes())); + } + + return options; + } + }; + return new QueryableJsonSchemaProperty(enc, QueryCharacteristics.of(characteristic)); } private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, @@ -142,15 +341,12 @@ private JsonSchemaProperty createObjectSchemaPropertyForEntity(List targetType, boolean required) { - List possibleValues = new ArrayList<>(); - - for (Object enumValue : EnumSet.allOf((Class) targetType)) { - possibleValues.add(converter.convertToMongoType(enumValue)); - } + List possibleValues = getPossibleEnumValues((Class) targetType); - targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass(); + targetType = computeTargetType(targetType, possibleValues); return createSchemaProperty(fieldName, targetType, required, possibleValues); } @@ -161,34 +357,39 @@ JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean r JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required, Collection possibleValues) { - TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type)) + TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues); + + return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + } + + private TypedJsonSchemaObject createSchemaObject(Object type, Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? 
JsonSchemaObject.of(typeObject) : JsonSchemaObject.of(Class.class.cast(type)); if (!CollectionUtils.isEmpty(possibleValues)) { schemaObject = schemaObject.possibleValues(possibleValues); } - - return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + return schemaObject; } - private String computePropertyFieldName(PersistentProperty property) { + private String computePropertyFieldName(PersistentProperty property) { - return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName() + return property instanceof MongoPersistentProperty mongoPersistentProperty ? mongoPersistentProperty.getFieldName() : property.getName(); } - private boolean isRequiredProperty(PersistentProperty property) { + private boolean isRequiredProperty(PersistentProperty property) { return property.getType().isPrimitive(); } private Class computeTargetType(PersistentProperty property) { - if (!(property instanceof MongoPersistentProperty)) { + if (!(property instanceof MongoPersistentProperty mongoProperty)) { return property.getType(); } - MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property; - if (!mongoProperty.isIdProperty()) { + if (!property.getOwner().isIdProperty(property)) { return mongoProperty.getFieldType(); } @@ -199,12 +400,53 @@ private Class computeTargetType(PersistentProperty property) { return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType(); } + private static Class computeTargetType(Class fallback, List possibleValues) { + return possibleValues.isEmpty() ? 
fallback : possibleValues.iterator().next().getClass(); + } + + private > List getPossibleEnumValues(Class targetType) { + + EnumSet enumSet = EnumSet.allOf(targetType); + List possibleValues = new ArrayList<>(enumSet.size()); + + for (Object enumValue : enumSet) { + possibleValues.add(converter.convertToMongoType(enumValue)); + } + + return possibleValues; + } + + private static boolean isCollection(MongoPersistentProperty property) { + return property.isCollectionLike() && !property.getType().equals(byte[].class); + } + static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) { + return required ? JsonSchemaProperty.required(property) : property; + } + + class PropertyContext implements JsonSchemaPropertyContext { - if (!required) { + private final String path; + private final MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { return property; } - return JsonSchemaProperty.required(property); + @Override + public MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return (MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java index dd881f9312..fdfeaa81ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -57,8 +57,8 @@ public class MongoAction { public MongoAction(@Nullable WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation, String collectionName, @Nullable Class entityType, @Nullable Document document, @Nullable Document query) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null"); this.defaultWriteConcern = defaultWriteConcern; this.mongoActionOperation = mongoActionOperation; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java index dbc688d94b..509d10887b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,9 +21,10 @@ * * @author Mark Pollack * @author Oliver Gierke + * @author Christoph Strobl * @see MongoAction */ public enum MongoActionOperation { - REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK; + REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK, REPLACE } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java index 1e3465cd8b..5fcc6c9599 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,7 +30,9 @@ * @author Thomas Darimont * @author Mark Paluch * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Mongo Admin Operations") public class MongoAdmin implements MongoAdminOperations { @@ -42,29 +44,20 @@ public class MongoAdmin implements MongoAdminOperations { */ public MongoAdmin(MongoClient client) { - Assert.notNull(client, "Client must not be null!"); + Assert.notNull(client, "Client must not be null"); this.mongoClient = client; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#dropDatabase(java.lang.String) - */ @ManagedOperation public void dropDatabase(String databaseName) { getDB(databaseName).drop(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#createDatabase(java.lang.String) - */ @ManagedOperation public void createDatabase(String databaseName) { getDB(databaseName); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.core.MongoAdminOperations#getDatabaseStats(java.lang.String) - */ @ManagedOperation public String getDatabaseStats(String databaseName) { return getDB(databaseName).runCommand(new Document("dbStats", 1).append("scale", 1024)).toJson(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java index 415fba76b5..ec03302f7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ * @author Mark Pollack * @author Oliver Gierke */ +@Deprecated(since = "4.5", forRemoval = true) public interface MongoAdminOperations { @ManagedOperation diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java index 22194a451a..c5fee9cf54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -55,8 +55,6 @@ */ public class MongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private @Nullable MongoClientSettings mongoClientSettings; private @Nullable String host; private @Nullable Integer port; @@ -64,7 +62,7 @@ public class MongoClientFactoryBean extends AbstractFactoryBean imp private @Nullable ConnectionString connectionString; private @Nullable String replicaSet = null; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** * Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}. @@ -116,35 +114,34 @@ public void setReplicaSet(@Nullable String replicaSet) { * @param exceptionTranslator */ public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + this.exceptionTranslator = exceptionTranslator == null ? 
MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClient.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + @Override @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return exceptionTranslator.translateExceptionIfPossible(ex); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ + @Override + public Class getObjectType() { + return MongoClient.class; + } + @Override protected MongoClient createInstance() throws Exception { return createMongoClient(computeClientSetting()); } + @Override + protected void destroyInstance(@Nullable MongoClient instance) throws Exception { + + if (instance != null) { + instance.close(); + } + } + /** * Create {@link MongoClientSettings} based on configuration and priority (lower is better). *
    @@ -158,7 +155,7 @@ protected MongoClient createInstance() throws Exception { protected MongoClientSettings computeClientSetting() { if (connectionString != null && (StringUtils.hasText(host) || port != null)) { - throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!"); + throw new IllegalStateException("ConnectionString and host/port configuration exclude one another"); } ConnectionString connectionString = this.connectionString != null ? this.connectionString @@ -336,23 +333,16 @@ private T computeSettingsValue(T defaultValue, T fromSettings, T fromConnect return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object) - */ - @Override - protected void destroyInstance(@Nullable MongoClient instance) throws Exception { - - if (instance != null) { - instance.close(); - } - } - private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException { return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); } private String getOrDefault(Object value, String defaultValue) { - return !StringUtils.isEmpty(value) ? value.toString() : defaultValue; + + if(value == null) { + return defaultValue; + } + String sValue = value.toString(); + return StringUtils.hasText(sValue) ? 
sValue : defaultValue; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java index 162035a45d..02913b4303 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,9 @@ import org.bson.UuidRepresentation; import org.bson.codecs.configuration.CodecRegistry; + import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; import org.springframework.lang.Nullable; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -36,10 +38,11 @@ import com.mongodb.ReadConcern; import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; import com.mongodb.WriteConcern; import com.mongodb.connection.ClusterConnectionMode; import com.mongodb.connection.ClusterType; -import com.mongodb.connection.StreamFactoryFactory; +import com.mongodb.connection.TransportSettings; /** * A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver. 
@@ -53,7 +56,10 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean getObjectType() { return MongoClientSettings.class; @@ -422,7 +444,6 @@ protected MongoClientSettings createInstance() { settings.hosts(clusterHosts); } settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS); - // settings.maxWaitQueueSize(clusterMaxWaitQueueSize); settings.requiredClusterType(custerRequiredClusterType); if (StringUtils.hasText(clusterSrvHost)) { @@ -467,18 +488,26 @@ protected MongoClientSettings createInstance() { } }); + if (transportSettings != null) { + builder.transportSettings(transportSettings); + } + if (streamFactoryFactory != null) { - builder = builder.streamFactoryFactory(streamFactoryFactory); + MongoCompatibilityAdapter.clientSettingsBuilderAdapter(builder).setStreamFactoryFactory(streamFactoryFactory); } + if (retryReads != null) { builder = builder.retryReads(retryReads); } + if (retryWrites != null) { builder = builder.retryWrites(retryWrites); } - if (uUidRepresentation != null) { - builder.uuidRepresentation(uUidRepresentation); + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); } return builder.build(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java index ee3608e8ed..df58a36770 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -44,8 +44,8 @@ public MongoDataIntegrityViolationException(String message, WriteConcernResult w super(message); - Assert.notNull(writeResult, "WriteResult must not be null!"); - Assert.notNull(actionOperation, "MongoActionOperation must not be null!"); + Assert.notNull(writeResult, "WriteResult must not be null"); + Assert.notNull(actionOperation, "MongoActionOperation must not be null"); this.writeResult = writeResult; this.actionOperation = actionOperation; @@ -61,7 +61,7 @@ public WriteConcernResult getWriteResult() { } /** - * Returns the {@link MongoActionOperation} in which the current exception occured. + * Returns the {@link MongoActionOperation} in which the current exception occurred. * * @return the actionOperation */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java index dac4b0d6d7..eab6b5d7f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,8 +32,7 @@ /** * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as - * database name and exception translator. - *

    + * database name and exception translator.
    * Not intended to be used directly. * * @author Christoph Strobl @@ -47,8 +46,8 @@ public abstract class MongoDatabaseFactorySupport implements MongoDatabaseFac private final C mongoClient; private final String databaseName; private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; + private PersistenceExceptionTranslator exceptionTranslator; private @Nullable WriteConcern writeConcern; /** @@ -64,10 +63,10 @@ public abstract class MongoDatabaseFactorySupport implements MongoDatabaseFac protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, PersistenceExceptionTranslator exceptionTranslator) { - Assert.notNull(mongoClient, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); + Assert.notNull(mongoClient, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), - "Database name must not contain slashes, dots, spaces, quotes, or dollar signs!"); + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); this.mongoClient = mongoClient; this.databaseName = databaseName; @@ -75,31 +74,39 @@ protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolea this.exceptionTranslator = exceptionTranslator; } + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. + * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } + /** * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. 
* - * @param writeConcern the writeConcern to set + * @param writeConcern the writeConcern to set. */ public void setWriteConcern(WriteConcern writeConcern) { this.writeConcern = writeConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase() - */ + @Override public MongoDatabase getMongoDatabase() throws DataAccessException { return getMongoDatabase(getDefaultDatabaseName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String) - */ @Override public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { - Assert.hasText(dbName, "Database name must not be empty!"); + Assert.hasText(dbName, "Database name must not be empty"); MongoDatabase db = doGetMongoDatabase(dbName); @@ -118,28 +125,13 @@ public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException */ protected abstract MongoDatabase doGetMongoDatabase(String dbName); - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.DisposableBean#destroy() - */ public void destroy() throws Exception { if (mongoInstanceCreated) { closeClient(); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session) - */ + @Override public MongoDatabaseFactory withSession(ClientSession session) { return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this); } @@ -180,55 +172,31 @@ public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFact this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase() - */ @Override public MongoDatabase getMongoDatabase() throws DataAccessException { 
return proxyMongoDatabase(delegate.getMongoDatabase()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String) - */ @Override public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { return proxyMongoDatabase(delegate.getMongoDatabase(dbName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ @Override public PersistenceExceptionTranslator getExceptionTranslator() { return delegate.getExceptionTranslator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public ClientSession getSession(ClientSessionOptions options) { return delegate.getSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession) - */ @Override public MongoDatabaseFactory withSession(ClientSession session) { return delegate.withSession(session); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive() - */ @Override public boolean isTransactionActive() { return session != null && session.hasActiveTransaction(); @@ -269,7 +237,7 @@ public MongoDatabaseFactory getDelegate() { } @Override - public boolean equals(Object o) { + public boolean equals(@Nullable Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java deleted file mode 100644 index bc0e39bbc9..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import org.springframework.dao.support.PersistenceExceptionTranslator; - -/** - * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as - * database name and exception translator. - *

    - * Not intended to be used directly. - * - * @author Christoph Strobl - * @author Mark Paluch - * @param Client type. - * @since 2.1 - * @see SimpleMongoClientDatabaseFactory - * @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead. - */ -@Deprecated -public abstract class MongoDbFactorySupport extends MongoDatabaseFactorySupport { - - /** - * Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName}, - * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. - * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of - * {@link MongoDbFactorySupport} to close the client on {@link #destroy()}. - * @param exceptionTranslator must not be {@literal null}. - */ - protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, - PersistenceExceptionTranslator exceptionTranslator) { - super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java index 907e180d13..7aef5a3a82 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -88,10 +88,6 @@ public void setSchemaMap(Map schemaMap) { this.schemaMap = schemaMap; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ @Override public AutoEncryptionSettings getObject() { @@ -109,10 +105,6 @@ private Map orEmpty(@Nullable Map source) { return source != null ? source : Collections.emptyMap(); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ @Override public Class getObjectType() { return AutoEncryptionSettings.class; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java index b060d43262..1ec7d3ffc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,9 +15,6 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; import org.bson.BsonInvalidOperationException; @@ -31,7 +28,7 @@ import org.springframework.dao.PermissionDeniedDataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.ClientSessionException; -import org.springframework.data.mongodb.MongoTransactionException; +import org.springframework.data.mongodb.TransientClientSessionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.util.MongoDbErrorCodes; import org.springframework.lang.Nullable; @@ -55,25 +52,30 @@ */ public class MongoExceptionTranslator implements PersistenceExceptionTranslator { - private static final Set DUPLICATE_KEY_EXCEPTIONS = new HashSet<>( - Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException")); + public static final MongoExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private static final Set RESOURCE_FAILURE_EXCEPTIONS = new HashSet<>( - Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound", - "MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException")); + private static final Set DUPLICATE_KEY_EXCEPTIONS = Set.of("MongoException.DuplicateKey", + "DuplicateKeyException"); - private static final Set RESOURCE_USAGE_EXCEPTIONS = new HashSet<>( - Collections.singletonList("MongoInternalException")); + private static final Set RESOURCE_FAILURE_EXCEPTIONS = Set.of("MongoException.Network", + "MongoSocketException", "MongoException.CursorNotFound", "MongoCursorNotFoundException", + "MongoServerSelectionException", "MongoTimeoutException"); - private static final Set DATA_INTEGRITY_EXCEPTIONS = new HashSet<>( - Arrays.asList("WriteConcernException", 
"MongoWriteException", "MongoBulkWriteException")); + private static final Set RESOURCE_USAGE_EXCEPTIONS = Set.of("MongoInternalException"); - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + private static final Set DATA_INTEGRITY_EXCEPTIONS = Set.of("WriteConcernException", "MongoWriteException", + "MongoBulkWriteException"); + + private static final Set SECURITY_EXCEPTIONS = Set.of("MongoCryptException"); + + @Override @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return doTranslateException(ex); + } + + @Nullable + DataAccessException doTranslateException(RuntimeException ex) { // Check for well-known MongoException subclasses. @@ -102,12 +104,12 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) { if (ex instanceof MongoServerException) { - if (((MongoServerException) ex).getCode() == 11000) { + if (MongoDbErrorCodes.isDataDuplicateKeyError(ex)) { return new DuplicateKeyException(ex.getMessage(), ex); } - if (ex instanceof MongoBulkWriteException) { - for (BulkWriteError x : ((MongoBulkWriteException) ex).getWriteErrors()) { - if (x.getCode() == 11000) { + if (ex instanceof MongoBulkWriteException bulkException) { + for (BulkWriteError writeError : bulkException.getWriteErrors()) { + if (MongoDbErrorCodes.isDuplicateKeyCode(writeError.getCode())) { return new DuplicateKeyException(ex.getMessage(), ex); } } @@ -118,23 +120,32 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { } // All other MongoExceptions - if (ex instanceof MongoException) { + if (ex instanceof MongoException mongoException) { - int code = ((MongoException) ex).getCode(); + int code = mongoException.getCode(); - if (MongoDbErrorCodes.isDuplicateKeyCode(code)) { + if (MongoDbErrorCodes.isDuplicateKeyError(mongoException)) { return 
new DuplicateKeyException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) { + } + if (MongoDbErrorCodes.isDataAccessResourceError(mongoException)) { return new DataAccessResourceFailureException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001 - || code == 12010 || code == 12011 || code == 12012) { + } + if (MongoDbErrorCodes.isInvalidDataAccessApiUsageError(mongoException) || code == 12001 || code == 12010 + || code == 12011 || code == 12012) { return new InvalidDataAccessApiUsageException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) { + } + if (MongoDbErrorCodes.isPermissionDeniedError(mongoException)) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isDataIntegrityViolationError(mongoException)) { + return new DataIntegrityViolationException(mongoException.getMessage(), mongoException); + } + if (MongoDbErrorCodes.isClientSessionFailure(mongoException)) { + return isTransientFailure(mongoException) ? new TransientClientSessionException(ex.getMessage(), ex) + : new ClientSessionException(ex.getMessage(), ex); + } + if (ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) { return new PermissionDeniedDataAccessException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isClientSessionFailureCode(code)) { - return new ClientSessionException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isTransactionFailureCode(code)) { - return new MongoTransactionException(ex.getMessage(), ex); } return new UncategorizedMongoDbException(ex.getMessage(), ex); @@ -155,4 +166,27 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { // that translation should not occur. return null; } + + /** + * Check if a given exception holds an error label indicating a transient failure. 
+ * + * @param e the exception to inspect. + * @return {@literal true} if the given {@link Exception} is a {@link MongoException} holding one of the transient + * exception error labels. + * @see MongoException#hasErrorLabel(String) + * @since 4.4 + */ + public boolean isTransientFailure(Exception e) { + + if (e instanceof MongoException mongoException) { + return mongoException.hasErrorLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL) + || mongoException.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + } + + if (e.getCause() != e && e.getCause() instanceof Exception ex) { + return isTransientFailure(ex); + } + + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java index f3c0dcd624..66b1cf209e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,7 +15,24 @@ */ package org.springframework.data.mongodb.core; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.util.Assert; @@ -24,6 +41,7 @@ * following mapping rules. *

    * Required Properties + *

    *
      *
    • Properties of primitive type
    • *
    @@ -45,7 +63,7 @@ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. - *

    + * {@link Encrypted} properties will contain {@literal encrypt} information. * * @author Christoph Strobl * @since 2.2 @@ -60,6 +78,111 @@ public interface MongoJsonSchemaCreator { */ MongoJsonSchema createSchemaFor(Class type); + /** + * Create a merged {@link MongoJsonSchema} out of the individual schemas of the given types by merging their + * properties into one large {@link MongoJsonSchema schema}. + * + * @param types must not be {@literal null} nor contain {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergedSchemaFor(Class... types) { + + MongoJsonSchema[] schemas = Arrays.stream(types).map(this::createSchemaFor).toArray(MongoJsonSchema[]::new); + return MongoJsonSchema.merge(schemas); + } + + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * Entry point to specify additional behavior for a given path. + * + * @param path the path using {@literal dot '.'} notation. + * @return new instance of {@link PropertySpecifier}. + * @since 3.4 + */ + PropertySpecifier property(String path); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. + * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. 
It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable + MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. + * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + /** * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given * {@link MongoConverter}. @@ -69,7 +192,59 @@ public interface MongoJsonSchemaCreator { */ static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { - Assert.notNull(mongoConverter, "MongoConverter must not be null!"); + Assert.notNull(mongoConverter, "MongoConverter must not be null"); return new MappingMongoJsonSchemaCreator(mongoConverter); } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. 
+ * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface PropertySpecifier { + + /** + * Set additional type parameters for polymorphic ones. + * + * @param types must not be {@literal null}. + * @return the source + */ + MongoJsonSchemaCreator withTypes(Class... types); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index e4a4b0868f..65396bc7fe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,31 +18,37 @@ import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.function.Supplier; +import java.util.stream.Stream; import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.index.IndexOperations; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import 
org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.UpdateDefinition; -import org.springframework.data.util.CloseableIterator; +import org.springframework.data.util.Lock; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -57,8 +63,7 @@ /** * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK - * proxy). - *

    + * proxy).
    * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -72,6 +77,7 @@ * @author Thomas Darimont * @author Maninder Singh * @author Mark Paluch + * @author Woojin Shin */ public interface MongoOperations extends FluentMongoOperations { @@ -80,11 +86,12 @@ public interface MongoOperations extends FluentMongoOperations { * * @param entityClass must not be {@literal null}. * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. */ String getCollectionName(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to + * Execute a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to * obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -116,7 +123,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Execute a MongoDB query and iterate over the query results on a per-document basis with a DocumentCallbackHandler. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. * @param dch the handler that will extract results, one document at a time. @@ -124,8 +131,7 @@ public interface MongoOperations extends FluentMongoOperations { void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch); /** - * Executes a {@link DbCallback} translating any exceptions as necessary. - *

    + * Executes a {@link DbCallback} translating any exceptions as necessary.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -137,8 +143,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(DbCallback action); /** - * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *

    + * Executes the given {@link CollectionCallback} on the entity collection of the specified class.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -150,8 +155,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(Class entityClass, CollectionCallback action); /** - * Executes the given {@link CollectionCallback} on the collection of the given name. - *

    + * Executes the given {@link CollectionCallback} on the collection of the given name.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -175,8 +179,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} - * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

    + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. * @@ -185,21 +188,22 @@ public interface MongoOperations extends FluentMongoOperations { */ default SessionScoped withSession(Supplier sessionProvider) { - Assert.notNull(sessionProvider, "SessionProvider must not be null!"); + Assert.notNull(sessionProvider, "SessionProvider must not be null"); return new SessionScoped() { - private final Object lock = new Object(); - private @Nullable ClientSession session = null; + private final Lock lock = Lock.of(new ReentrantLock()); + private @Nullable ClientSession session; @Override public T execute(SessionCallback action, Consumer onComplete) { - synchronized (lock) { + lock.executeWithoutResult(() -> { + if (session == null) { session = sessionProvider.get(); } - } + }); try { return action.doInSession(MongoOperations.this.withSession(session)); @@ -211,8 +215,7 @@ public T execute(SessionCallback action, Consumer onComple } /** - * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}. - *

    + * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -225,34 +228,34 @@ public T execute(SessionCallback action, Consumer onComple * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB * {@link com.mongodb.client.FindIterable}. *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to - * be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.7 */ - CloseableIterator stream(Query query, Class entityType); + Stream stream(Query query, Class entityType); /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed * by a Mongo DB {@link com.mongodb.client.FindIterable}. *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to - * be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.10 */ - CloseableIterator stream(Query query, Class entityType, String collectionName); + Stream stream(Query query, Class entityType, String collectionName); /** * Create an uncapped collection with a name based on the provided entity class. @@ -288,6 +291,58 @@ public T execute(SessionCallback action, Consumer onComple */ MongoCollection createCollection(String collectionName, @Nullable CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationOperation... 
stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. 
+ * @since 4.0 + */ + MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. * @@ -299,8 +354,7 @@ public T execute(SessionCallback action, Consumer onComple * Get a {@link MongoCollection} by its name. The returned collection may not exists yet (except in local memory) and * is created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) - * exists} first. - *

    + * exists} first.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -309,8 +363,7 @@ public T execute(SessionCallback action, Consumer onComple MongoCollection getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

    + * Check to see if a collection with a name indicated by the entity class exists.
    * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -319,8 +372,7 @@ public T execute(SessionCallback action, Consumer onComple boolean collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

    + * Check to see if a collection with a given name exists.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -329,8 +381,7 @@ public T execute(SessionCallback action, Consumer onComple boolean collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

    + * Drop the collection with the name indicated by the entity class.
    * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -338,8 +389,7 @@ public T execute(SessionCallback action, Consumer onComple void dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

    + * Drop the collection with the given name.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -402,11 +452,9 @@ public T execute(SessionCallback action, Consumer onComple BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); /** - * Query for a list of objects of type T from the collection used by the entity class. - *

    + * Query for a list of objects of type T from the collection used by the entity class.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -416,11 +464,9 @@ public T execute(SessionCallback action, Consumer onComple List findAll(Class entityClass); /** - * Query for a list of objects of type T from the specified collection. - *

    + * Query for a list of objects of type T from the specified collection.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -430,43 +476,6 @@ public T execute(SessionCallback action, Consumer onComple */ List findAll(Class entityClass, String collectionName); - /** - * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of - * the returned object that takes int account the initial document structure as well as any finalize functions. - * - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0.
    - * Please use {@link #aggregate(TypedAggregation, String, Class) } with a - * {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead. - */ - @Deprecated - GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass); - - /** - * Execute a group operation restricting the rows to those which match the provided Criteria. The group operation - * entity class should match the 'shape' of the returned object that takes int account the initial document structure - * as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0.
    - * Please use {@link #aggregate(TypedAggregation, String, Class) } with a - * {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and - * {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead. - */ - @Deprecated - GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass); - /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the * inputCollection is derived from the inputType of the aggregation. @@ -521,9 +530,9 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that - * needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is - * derived from the inputType of the aggregation. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. The name of the inputCollection is derived from + * the inputType of the aggregation. *

    * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. @@ -532,35 +541,37 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * {@literal null}. * @param collectionName The name of the input collection to use for the aggreation. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that - * needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name - * of the inputCollection is derived from the inputType of the aggregation. - *

    + *

    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class and are returned as stream. The name of the + * inputCollection is derived from the inputType of the aggregation. + *

    * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, Class outputType); /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that - * needs to be closed. The raw results will be mapped to the given entity class. - *

    + *

    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

    * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -569,17 +580,18 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType); + Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType); /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that - * needs to be closed. The raw results will be mapped to the given entity class. - *

    + *

    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

    * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -588,10 +600,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType); /** * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE @@ -601,7 +614,9 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -614,7 +629,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param mapReduceOptions Options that specify detailed map-reduce behavior. * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. 
*/ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -628,7 +645,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -642,7 +661,9 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * @param mapReduceOptions Options that specify detailed map-reduce behavior * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -701,15 +722,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

    + * specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object. @@ -719,15 +738,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

    + * type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. @@ -741,7 +758,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -750,7 +767,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -759,7 +776,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. 
* @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -767,15 +784,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin boolean exists(Query query, @Nullable Class entityClass, String collectionName); /** - * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *

    + * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @return the List of converted objects. @@ -783,15 +798,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin List find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *

    + * Map the results of an ad-hoc query on the specified collection to a List of the specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -799,6 +812,57 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin */ List find(Query query, Class entityClass, String collectionName); + /** + * Query for a window of objects of type T from the specified collection.
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -881,11 +945,14 @@ default List findDistinct(Query query, String field, String collection, C } /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated or {@literal null}, if not found. @@ -897,11 +964,14 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. @@ -914,12 +984,15 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. @@ -934,12 +1007,15 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. @@ -957,17 +1033,19 @@ T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
    * The collection name is derived from the {@literal replacement} type.
    * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ @Nullable @@ -977,14 +1055,14 @@ default T findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
    * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. @@ -997,18 +1075,20 @@ default T findAndReplace(Query query, T replacement, String collectionName) /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ @Nullable @@ -1018,13 +1098,13 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of @@ -1035,19 +1115,19 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o @Nullable default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the parametrized type. Must not be {@literal null}. @@ -1066,13 +1146,13 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -1082,6 +1162,8 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ @Nullable @@ -1094,13 +1176,13 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields. Must not be {@literal null}. @@ -1119,14 +1201,12 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

    - * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

    + * database.
    + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object @@ -1137,14 +1217,13 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

    + *
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. @@ -1159,18 +1238,19 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    - * This method uses an + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) - * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees - * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link #estimatedCount(Class)} for empty queries instead. + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ long count(Query query, Class entityClass); @@ -1181,41 +1261,61 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    - * This method uses an + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) - * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees - * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link #estimatedCount(String)} for empty queries instead. + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #exactCount(Query, String) + * @see #estimatedCount(String) */ long count(Query query, String collectionName); + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @see #estimatedCount(String) + */ + long count(Query query, @Nullable Class entityClass, String collectionName); + /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, - * based on collection statistics. - *

    + * based on collection statistics.
    * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * * @param entityClass must not be {@literal null}. * @return the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 3.1 */ default long estimatedCount(Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); return estimatedCount(getCollectionName(entityClass)); } /** - * Estimate the number of documents in the given collection based on collection statistics. - *

    + * Estimate the number of documents in the given collection based on collection statistics.
    * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1225,14 +1325,62 @@ default long estimatedCount(Class entityClass) { */ long estimatedCount(String collectionName); + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default long exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default long exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity * class to map the given {@link Query}.
    * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    + * count all matches.
    * This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1244,38 +1392,42 @@ default long estimatedCount(Class entityClass) { * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @since 3.4 */ - long count(Query query, @Nullable Class entityClass, String collectionName); + long exactCount(Query query, @Nullable Class entityClass, String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

    - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

    - * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Insert the object into the collection for the entity type of the object to save.
    + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
    + * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See * Spring's - * Type Conversion" for more details. - *

    - * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

    + * Type Conversion" for more details.
    + * Insert is used to initially store the object into the database. To update an existing object use the + * {@link #save(Object)} method. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ T insert(T objectToSave); /** - * Insert the object into the specified collection. - *

    + * Insert the object into the specified collection.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

    + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1287,15 +1439,27 @@ default long estimatedCount(Class entityClass) { /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the inserted objects that. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Collection insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1306,70 +1470,91 @@ default long estimatedCount(Class entityClass) { /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the list of objects to save. Must not be {@literal null}. * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ Collection insertAll(Collection objectsToSave); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

    + * object is not already present, that is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

    + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ T save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

    + * is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. - *

    + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ T save(T objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by - * combining the query document and the update document.
    + * combining the query document and the update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

    * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. - * @since 3.0 * @see Update * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass); @@ -1382,7 +1567,7 @@ default long estimatedCount(Class entityClass) { * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. @@ -1397,8 +1582,11 @@ default long estimatedCount(Class entityClass) { /** * Performs an upsert. 
If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. @@ -1414,16 +1602,22 @@ default long estimatedCount(Class entityClass) { /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. - * @since 3.0 * @see Update * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass); @@ -1432,12 +1626,11 @@ default long estimatedCount(Class entityClass) { * the provided updated document.
    * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific - * support.
    - * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. - * Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead. + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. @@ -1450,10 +1643,14 @@ default long estimatedCount(Class entityClass) { /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
    + * the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. @@ -1468,16 +1665,21 @@ default long estimatedCount(Class entityClass) { /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. - * @since 3.0 + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @see Update * @see AggregationUpdate + * @since 3.0 */ UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass); @@ -1488,7 +1690,7 @@ default long estimatedCount(Class entityClass) { * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. @@ -1503,8 +1705,11 @@ default long estimatedCount(Class entityClass) { /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. @@ -1525,6 +1730,8 @@ default long estimatedCount(Class entityClass) { * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ DeleteResult remove(Object object); @@ -1535,29 +1742,33 @@ default long estimatedCount(Class entityClass) { * acknowledged} remove operation was successful or not. * * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ DeleteResult remove(Object object, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. 
* - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ DeleteResult remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query}, {@literal entityClass} or {@literal collectionName} is * {@literal null}. @@ -1570,8 +1781,9 @@ default long estimatedCount(Class entityClass) { * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. 
Use {@link #remove(Query, Class, String)} to get full type specific support. * - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal collectionName} is {@literal null}. */ @@ -1583,7 +1795,8 @@ default long estimatedCount(Class entityClass) { * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. * @since 1.5 */ @@ -1595,23 +1808,97 @@ default long estimatedCount(Class entityClass) { * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link List} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. 
* @since 1.5 */ List findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. * @since 1.5 */ List findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
    + * The collection name is derived from the {@literal replacement} type.
    + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may * + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @since 4.2 + */ + UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Returns the underlying {@link MongoConverter}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..37001faa4e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. + * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Nullable + @Override + public ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. 
just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index eae4f42706..fd547c61a0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2010-2021 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,12 +22,14 @@ import java.math.RoundingMode; import java.util.*; import java.util.concurrent.TimeUnit; +import java.util.function.BiPredicate; import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; import org.bson.conversions.Bson; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; @@ -43,6 +45,8 @@ import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; @@ -54,6 +58,7 @@ import org.springframework.data.mongodb.MongoDatabaseUtils; import org.springframework.data.mongodb.SessionSynchronization; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import 
org.springframework.data.mongodb.core.CollectionPreparerSupport.CollectionPreparerDelegate; import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; @@ -62,11 +67,13 @@ import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; import org.springframework.data.mongodb.core.QueryOperations.QueryContext; import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; @@ -78,30 +85,30 @@ import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.DefaultSearchIndexOperations; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import 
org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.index.SearchIndexOperations; +import org.springframework.data.mongodb.core.index.SearchIndexOperationsProvider; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.event.*; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Meta; -import org.springframework.data.mongodb.core.query.Meta.CursorOption; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.mongodb.util.BsonUtils; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.CloseableIterator; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; @@ -132,7 +139,23 @@ import 
com.mongodb.client.result.UpdateResult; /** - * Primary implementation of {@link MongoOperations}. + * Primary implementation of {@link MongoOperations}. It simplifies the use of imperative MongoDB usage and helps to + * avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindIterable} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link MongoDatabaseFactory} reference, or get prepared in an application context and given to services as bean + * reference. + *

    + * Note: The {@link MongoDatabaseFactory} should always be configured as a bean in the application context, in the first + * case given to the service directly, in the second case to the prepared template. + *

    {@link ReadPreference} and {@link com.mongodb.ReadConcern}

    + *

    + * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

    + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Thomas Risberg * @author Graeme Rocher @@ -159,10 +182,14 @@ * @author Yadhukrishna S Pai * @author Anton Barkan * @author Bartłomiej Mazur + * @author Michael Krog + * @author Jakub Zurawa + * @author Florian Lüdiger */ -public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider, + SearchIndexOperationsProvider, ReadPreferenceAware { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); + private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; private final MongoConverter mongoConverter; @@ -172,10 +199,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, private final QueryMapper queryMapper; private final UpdateMapper updateMapper; private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; private final EntityOperations operations; private final PropertyOperations propertyOperations; private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; @@ -188,6 +215,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + private CountExecution countExecution = this::doExactCount; + /** * Constructor used for a basic template configuration. 
* @@ -216,7 +245,7 @@ public MongoTemplate(MongoDatabaseFactory mongoDbFactory) { */ public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); this.mongoDbFactory = mongoDbFactory; this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); @@ -224,18 +253,16 @@ public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConvert this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); - this.operations = new EntityOperations(this.mongoConverter.getMappingContext()); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, mongoDbFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We always have a mapping context in the converter, whether it's a simple one or not mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events - if (mappingContext instanceof MongoMappingContext) { - - MongoMappingContext mappingContext = (MongoMappingContext) this.mappingContext; + if (mappingContext instanceof MongoMappingContext mappingContext) { if (mappingContext.isAutoIndexCreation()) { @@ -254,8 +281,8 @@ private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) { // we need to (re)create the MappingMongoConverter as we need to have it use a DbRefResolver that operates within // the sames session. Otherwise loading referenced objects would happen outside of it. 
- if (that.mongoConverter instanceof MappingMongoConverter) { - this.mongoConverter = ((MappingMongoConverter) that.mongoConverter).with(dbFactory); + if (that.mongoConverter instanceof MappingMongoConverter mappingMongoConverter) { + this.mongoConverter = mappingMongoConverter.with(dbFactory); } else { this.mongoConverter = that.mongoConverter; } @@ -263,11 +290,11 @@ private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) { this.queryMapper = that.queryMapper; this.updateMapper = that.updateMapper; this.schemaMapper = that.schemaMapper; - this.projectionFactory = that.projectionFactory; this.mappingContext = that.mappingContext; this.operations = that.operations; this.propertyOperations = that.propertyOperations; this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; } /** @@ -310,34 +337,51 @@ public void setReadPreference(@Nullable ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + @Override + public boolean hasReadPreference() { + return this.readPreference != null; + } + + @Override + public ReadPreference getReadPreference() { + return this.readPreference; + } + + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. 
+ * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; + eventDelegate.setPublisher(eventPublisher); if (entityCallbacks == null) { setEntityCallbacks(EntityCallbacks.create(applicationContext)); } - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } resourceLoader = applicationContext; - - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); } /** * Set the {@link EntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. - *

    + *
    * Overrides potentially existing {@link EntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -346,10 +390,51 @@ public void setApplicationContext(ApplicationContext applicationContext) throws */ public void setEntityCallbacks(EntityCallbacks entityCallbacks) { - Assert.notNull(entityCallbacks, "EntityCallbacks must not be null!"); + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); this.entityCallbacks = entityCallbacks; } + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiPredicate estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionPreparer, collectionName, filter, options) -> { + + if (!estimationFilter.test(filter, options)) { + return doExactCount(collectionPreparer, collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionPreparer, collectionName, estimatedDocumentCountOptions); + }; + } else { + this.countExecution = this::doExactCount; + } + } + /** * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} @@ -369,8 +454,8 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext && indexCreator != null) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext && indexCreator != null) { + configurableApplicationContext.addApplicationListener(indexCreator); } } @@ -379,51 +464,45 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeAsStream(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override - public CloseableIterator stream(Query query, Class entityType) { + public Stream stream(Query query, Class entityType) { return stream(query, entityType, 
getCollectionName(entityType)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#stream(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - public CloseableIterator stream(Query query, Class entityType, String collectionName) { + public Stream stream(Query query, Class entityType, String collectionName) { return doStream(query, entityType, collectionName, entityType); } @SuppressWarnings("ConstantConditions") - protected CloseableIterator doStream(Query query, Class entityType, String collectionName, - Class returnType) { + protected Stream doStream(Query query, Class entityType, String collectionName, Class returnType) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityType, "Entity type must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityType, "Entity type must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(returnType, "ReturnType must not be null"); - return execute(collectionName, (CollectionCallback>) collection -> { + return execute(collectionName, (CollectionCallback>) collection -> { MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(returnType, entityType); Document mappedQuery = queryContext.getMappedQuery(persistentEntity); - Document mappedFields = queryContext.getMappedFields(persistentEntity, returnType, projectionFactory); + Document mappedFields = queryContext.getMappedFields(persistentEntity, projection); + CollectionPreparerDelegate readPreference = createDelegate(query); FindIterable cursor = new 
QueryCursorPreparer(query, entityType).initiateFind(collection, - col -> col.find(mappedQuery, Document.class).projection(mappedFields)); + col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields)); return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, - new ProjectingReadCallback<>(mongoConverter, entityType, returnType, collectionName)); + new ProjectingReadCallback<>(mongoConverter, projection, collectionName)).stream(); }); } @@ -432,52 +511,35 @@ public String getCollectionName(Class entityClass) { return this.operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(java.lang.String) - */ @Override @SuppressWarnings("ConstantConditions") public Document executeCommand(String jsonCommand) { - Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty!"); + Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty"); return execute(db -> db.runCommand(Document.parse(jsonCommand), Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document) - */ @Override @SuppressWarnings("ConstantConditions") public Document executeCommand(Document command) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return execute(db -> db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ - @Override @SuppressWarnings("ConstantConditions") public Document executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return execute(db -> readPreference != null // ? 
db.runCommand(command, readPreference, Document.class) // : db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeQuery(org.springframework.data.mongodb.core.query.Query, java.lang.String, org.springframework.data.mongodb.core.DocumentCallbackHandler) - */ @Override public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) { executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null)); @@ -497,30 +559,27 @@ public void executeQuery(Query query, String collectionName, DocumentCallbackHan protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler documentCallbackHandler, @Nullable CursorPreparer preparer) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null"); Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), Optional.empty()); Document sortObject = query.getSortObject(); Document fieldsObject = query.getFieldsObject(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject), - sortObject, fieldsObject, collectionName); + LOGGER.debug(String.format("Executing query: %s fields: %s sort: %s in collection: %s", + serializeToJsonSafely(queryObject), fieldsObject, serializeToJsonSafely(sortObject), collectionName)); } - this.executeQueryInternal(new FindCallback(queryObject, fieldsObject, null), + this.executeQueryInternal(new FindCallback(createDelegate(query), queryObject, fieldsObject, null), preparer != null ? 
preparer : CursorPreparer.NO_OP_PREPARER, documentCallbackHandler, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(DbCallback action) { - Assert.notNull(action, "DbCallback must not be null!"); + Assert.notNull(action, "DbCallback must not be null"); try { MongoDatabase db = prepareDatabase(this.doGetDatabase()); @@ -530,24 +589,18 @@ public T execute(DbCallback action) { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(Class entityClass, CollectionCallback callback) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return execute(getCollectionName(entityClass), callback); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(String collectionName, CollectionCallback callback) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(callback, "CollectionCallback must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(callback, "CollectionCallback must not be null"); try { MongoCollection collection = getAndPrepareCollection(doGetDatabase(), collectionName); @@ -557,26 +610,18 @@ public T execute(String collectionName, CollectionCallback callback) { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#withSession(com.mongodb.ClientSessionOptions) - */ @Override public SessionScoped withSession(ClientSessionOptions options) { - Assert.notNull(options, "ClientSessionOptions must not be null!"); + Assert.notNull(options, 
"ClientSessionOptions must not be null"); return withSession(() -> mongoDbFactory.getSession(options)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#withSession(com.mongodb.session.ClientSession) - */ @Override public MongoTemplate withSession(ClientSession session) { - Assert.notNull(session, "ClientSession must not be null!"); + Assert.notNull(session, "ClientSession must not be null"); return new SessionBoundMongoTemplate(session, MongoTemplate.this); } @@ -592,86 +637,98 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat this.sessionSynchronization = sessionSynchronization; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class) - */ + @Override public MongoCollection createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public MongoCollection createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - - CollectionOptions options = collectionOptions != null ? 
collectionOptions : CollectionOptions.empty(); - options = Optionals - .firstNonEmpty(() -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), - () -> operations.forType(entityClass).getCollation()) // - .map(options::collation).orElse(options); + Assert.notNull(entityClass, "EntityClass must not be null"); - return doCreateCollection(getCollectionName(entityClass), convertToDocument(options, entityClass)); + return doCreateCollection(getCollectionName(entityClass), + operations.convertToCreateCollectionOptions(collectionOptions, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String) - */ + @Override public MongoCollection createCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); return doCreateCollection(collectionName, new Document()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public MongoCollection createCollection(String collectionName, @Nullable CollectionOptions collectionOptions) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - return doCreateCollection(collectionName, convertToDocument(collectionOptions)); + Assert.notNull(collectionName, "CollectionName must not be null"); + return doCreateCollection(collectionName, + operations.convertToCreateCollectionOptions(collectionOptions, Object.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#getCollection(java.lang.String) - */ + @Override + public MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + 
queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); + } + + @Override + public MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private MongoCollection createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected MongoCollection doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + db.createView(name, source, pipeline, viewOptions); + return db.getCollection(name); + }); + } + + @Override @SuppressWarnings("ConstantConditions") public MongoCollection getCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); return execute(db -> db.getCollection(collectionName, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.Class) - */ + @Override public boolean collectionExists(Class entityClass) { return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.String) - */ + @Override @SuppressWarnings("ConstantConditions") public boolean collectionExists(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + 
Assert.notNull(collectionName, "CollectionName must not be null"); return execute(db -> { - for (String name : db.listCollectionNames()) { + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { if (name.equals(collectionName)) { return true; } @@ -680,27 +737,21 @@ public boolean collectionExists(String collectionName) { }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.Class) - */ + @Override public void dropCollection(Class entityClass) { dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.String) - */ + @Override public void dropCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); execute(collectionName, (CollectionCallback) collection -> { collection.drop(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Dropped collection [{}]", - collection.getNamespace() != null ? collection.getNamespace().getCollectionName() : collectionName); + LOGGER.debug(String.format("Dropped collection [%s]", + collection.getNamespace() != null ? 
collection.getNamespace().getCollectionName() : collectionName)); } return null; }); @@ -711,46 +762,46 @@ public IndexOperations indexOps(String collectionName) { return indexOps(collectionName, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String) - */ + @Override public IndexOperations indexOps(String collectionName, @Nullable Class type) { return new DefaultIndexOperations(this, collectionName, type); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class) - */ + @Override public IndexOperations indexOps(Class entityClass) { return indexOps(getCollectionName(entityClass), entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.String) - */ - public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { - return bulkOps(bulkMode, null, collectionName); + @Override + public SearchIndexOperations searchIndexOps(String collectionName) { + return searchIndexOps(null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class) - */ + @Override + public SearchIndexOperations searchIndexOps(Class type) { + return new DefaultSearchIndexOperations(this, type); + } + + @Override + public SearchIndexOperations searchIndexOps(@Nullable Class type, String collectionName) { + return new DefaultSearchIndexOperations(this, collectionName, type); + } + + @Override + public BulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override public BulkOperations bulkOps(BulkMode bulkMode, Class entityClass) { return bulkOps(bulkMode, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class, java.lang.String) - */ + @Override public BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { - Assert.notNull(mode, "BulkMode must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper, @@ -761,10 +812,6 @@ public BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, Stri return operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#scriptOps() - */ @Override public ScriptOperations scriptOps() { return new DefaultScriptOperations(this); @@ -782,13 +829,13 @@ public T findOne(Query query, Class entityClass) { @Override public T findOne(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); if (ObjectUtils.isEmpty(query.getSortObject())) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), + return doFindOne(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), new QueryCursorPreparer(query, entityClass), entityClass); } else { query.limit(1); @@ -814,41 +861,76 @@ public boolean exists(Query query, @Nullable Class entityClass, String collec if 
(query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); QueryContext queryContext = queryOperations.createQueryContext(query); Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); return execute(collectionName, - new ExistsCallback(mappedQuery, queryContext.getCollation(entityClass).orElse(null))); + new ExistsCallback(createDelegate(query), mappedQuery, queryContext.getCollation(entityClass).orElse(null))); } // Find methods that take a Query to express the query and that return a List of objects. - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List find(Query query, Class entityClass) { return find(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List find(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass, new QueryCursorPreparer(query, entityClass)); } + @Override + public Window scroll(Query query, Class entityType) { + + 
Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); + } + + @Override + public Window scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); + } + + Window doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); + + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), + keysetPaginationQuery.fields(), sourceClass, + new QueryCursorPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback); + + return ScrollUtils.createWindow(query, result, sourceClass, operations); + } + + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + sourceClass, new QueryCursorPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), + callback); + + return ScrollUtils.createWindow(result, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip())); + } + @Nullable @Override public T findById(Object id, Class entityClass) { @@ -859,38 +941,31 @@ public T findById(Object id, Class entityClass) { @Override public T findById(Object id, Class entityClass, String collectionName) { - 
Assert.notNull(id, "Id must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(id, "Id must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); String idKey = operations.getIdPropertyName(entityClass); - return doFindOne(collectionName, new Document(idKey, id), new Document(), entityClass); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), new Document(), + entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class) - */ @Override public List findDistinct(Query query, String field, Class entityClass, Class resultClass) { return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ @Override @SuppressWarnings("unchecked") public List findDistinct(Query query, String field, String collectionName, Class entityClass, Class resultClass) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must 
not be null"); MongoPersistentEntity entity = entityClass != Object.class ? getPersistentEntity(entityClass) : null; DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); @@ -902,14 +977,11 @@ public List findDistinct(Query query, String field, String collectionName MongoIterable result = execute(collectionName, (collection) -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing findDistinct using query {} for field: {} in collection: {}", - serializeToJsonSafely(mappedQuery), field, collectionName); + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); } - QueryCursorPreparer preparer = new QueryCursorPreparer(query, entityClass); - if (preparer.hasReadPreference()) { - collection = collection.withReadPreference(preparer.getReadPreference()); - } + collection = createDelegate(query).prepare(collection); DistinctIterable iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); distinctQueryContext.applyCollation(entityClass, iterable::collation); @@ -946,28 +1018,39 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col public GeoResults geoNear(NearQuery near, Class domainType, String collectionName, Class returnType) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (domainType == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(returnType, "ReturnType must not be null"); String collection 
= StringUtils.hasText(collectionName) ? collectionName : getCollectionName(domainType); String distanceField = operations.nearQueryDistanceFieldName(domainType); + Builder optionsBuilder = AggregationOptions.builder().collation(near.getCollation()); + + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } + + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } + Aggregation $geoNear = TypedAggregation.newAggregation(domainType, Aggregation.geoNear(near, distanceField)) - .withOptions(AggregationOptions.builder().collation(near.getCollation()).build()); + .withOptions(optionsBuilder.build()); AggregationResults results = aggregate($geoNear, collection, Document.class); + EntityProjection projection = operations.introspectProjection(returnType, domainType); DocumentCallback> callback = new GeoNearResultDocumentCallback<>(distanceField, - new ProjectingReadCallback<>(mongoConverter, domainType, returnType, collection), near.getMetric()); + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); - List> result = new ArrayList<>(); + List> result = new ArrayList<>(results.getMappedResults().size()); BigDecimal aggregate = BigDecimal.ZERO; for (Document element : results) { @@ -1007,50 +1090,48 @@ public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOp public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); - Assert.notNull(options, "Options must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + Assert.notNull(options, "Options must not be null"); + 
Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); if (!options.getCollation().isPresent()) { operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), + return doFindAndModify(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAndReplace(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions, java.lang.Class, java.lang.String, java.lang.Class) - */ @Override public T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, String collectionName, Class resultType) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(replacement, "Replacement must not be null!"); - Assert.notNull(options, "Options must not be null! Use FindAndReplaceOptions#empty() instead."); - Assert.notNull(entityType, "EntityType must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(resultType, "ResultType must not be null! 
Use Object.class instead."); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); - Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none!"); - Assert.isTrue(query.getSkip() <= 0, "Query must not define skip."); + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(resultType, entityType); + CollectionPreparerDelegate collectionPreparer = createDelegate(query); Document mappedQuery = queryContext.getMappedQuery(entity); - Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, projection); Document mappedSort = queryContext.getMappedSort(entity); replacement = maybeCallBeforeConvert(replacement, collectionName); @@ -1059,8 +1140,8 @@ public T findAndReplace(Query query, S replacement, FindAndReplaceOptions maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); maybeCallBeforeSave(replacement, mappedReplacement, collectionName); - T saved = doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, - queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, resultType); + T saved = doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, 
mappedSort, + queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, projection); if (saved != null) { maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName)); @@ -1083,11 +1164,11 @@ public T findAndRemove(Query query, Class entityClass) { @Override public T findAndRemove(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), + return doFindAndRemove(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null), entityClass); } @@ -1095,7 +1176,7 @@ public T findAndRemove(Query query, Class entityClass, String collectionN @Override public long count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); return count(query, entityClass, getCollectionName(entityClass)); } @@ -1108,28 +1189,30 @@ public long count(Query query, String collectionName) { * (non-Javadoc) * @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) */ + @Override public long count(Query query, @Nullable Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + 
Assert.hasText(collectionName, "Collection name must not be null or empty"); CountContext countContext = queryOperations.countQueryContext(query); CountOptions options = countContext.getCountOptions(entityClass); Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); - return doCount(collectionName, mappedQuery, options); + CollectionPreparerDelegate readPreference = createDelegate(query); + return doCount(readPreference, collectionName, mappedQuery, options); } - @SuppressWarnings("ConstantConditions") - protected long doCount(String collectionName, Document filter, CountOptions options) { + protected long doCount(CollectionPreparer collectionPreparer, String collectionName, Document filter, + CountOptions options) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing count: {} in collection: {}", serializeToJsonSafely(filter), collectionName); + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); } - return execute(collectionName, - collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + return countExecution.countDocuments(collectionPreparer, collectionName, filter, options); } /* @@ -1138,52 +1221,67 @@ protected long doCount(String collectionName, Document filter, CountOptions opti */ @Override public long estimatedCount(String collectionName) { - return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); + return doEstimatedCount(CollectionPreparerDelegate.of(this), collectionName, new EstimatedDocumentCountOptions()); } - protected long doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { - return execute(collectionName, collection -> collection.estimatedDocumentCount(options)); + protected long doEstimatedCount(CollectionPreparer> collectionPreparer, + String collectionName, EstimatedDocumentCountOptions options) { + return execute(collectionName, + 
collection -> collectionPreparer.prepare(collection).estimatedDocumentCount(options)); + } + + @Override + public long exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(createDelegate(query), collectionName, mappedQuery, options); + } + + protected long doExactCount(CollectionPreparer> collectionPreparer, String collectionName, + Document filter, CountOptions options) { + return execute(collectionName, collection -> collectionPreparer.prepare(collection) + .countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + + return + // only empty filter for estimatedCount + filter.isEmpty() && + // no skip, no limit,... + isEmptyOptions(options) && + // transaction active? 
+ !MongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object) - */ @Override public T insert(T objectToSave) { - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); + Assert.notNull(objectToSave, "ObjectToSave must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public T insert(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(objectToSave, "ObjectToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return (T) doInsert(collectionName, objectToSave, this.mongoConverter); } - /** - * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or - * {@link Iterator}. - * - * @param source can be {@literal null}. - * @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead. - */ - protected void ensureNotIterable(@Nullable Object source) { - ensureNotCollectionLike(source); - } - /** * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or * {@link Iterator}. 
@@ -1194,7 +1292,7 @@ protected void ensureNotIterable(@Nullable Object source) { protected void ensureNotCollectionLike(@Nullable Object source) { if (EntityOperations.isCollectionLike(source)) { - throw new IllegalArgumentException("Cannot use a collection here."); + throw new IllegalArgumentException("Cannot use a collection here"); } } @@ -1206,8 +1304,8 @@ protected void ensureNotCollectionLike(@Nullable Object source) { */ protected MongoCollection prepareCollection(MongoCollection collection) { - if (this.readPreference != null) { - collection = collection.withReadPreference(readPreference); + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); } return collection; @@ -1234,7 +1332,7 @@ private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } @@ -1266,7 +1364,7 @@ protected T doInsert(String collectionName, T objectToSave, MongoWriter w @SuppressWarnings("unchecked") public Collection insert(Collection batchToSave, Class entityClass) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); + Assert.notNull(batchToSave, "BatchToSave must not be null"); return (Collection) doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } @@ -1275,8 +1373,8 @@ public Collection insert(Collection batchToSave, Class en @SuppressWarnings("unchecked") public Collection insert(Collection batchToSave, String collectionName) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(batchToSave, 
"BatchToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); return (Collection) doInsertBatch(collectionName, batchToSave, this.mongoConverter); } @@ -1285,7 +1383,7 @@ public Collection insert(Collection batchToSave, String coll @SuppressWarnings("unchecked") public Collection insertAll(Collection objectsToSave) { - Assert.notNull(objectsToSave, "ObjectsToSave must not be null!"); + Assert.notNull(objectsToSave, "ObjectsToSave must not be null"); return (Collection) doInsertAll(objectsToSave, this.mongoConverter); } @@ -1302,12 +1400,7 @@ protected Collection doInsertAll(Collection listToSave, Mong } String collection = getCollectionName(ClassUtils.getUserClass(element)); - List collectionElements = elementsByCollection.get(collection); - - if (null == collectionElements) { - collectionElements = new ArrayList<>(); - elementsByCollection.put(collection, collectionElements); - } + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); } @@ -1322,9 +1415,9 @@ protected Collection doInsertAll(Collection listToSave, Mong protected Collection doInsertBatch(String collectionName, Collection batchToSave, MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); - List documentList = new ArrayList<>(); + List documentList = new ArrayList<>(batchToSave.size()); List initializedBatchToSave = new ArrayList<>(batchToSave.size()); for (T uninitialized : batchToSave) { @@ -1340,7 +1433,10 @@ protected Collection doInsertBatch(String collectionName, Collection(initialized, document, collectionName)); initialized = maybeCallBeforeSave(initialized, document, collectionName); - documentList.add(document); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(uninitialized.getClass()); + + 
documentList.add(mappedDocument.getDocument()); initializedBatchToSave.add(initialized); } @@ -1367,7 +1463,7 @@ protected Collection doInsertBatch(String collectionName, Collection T save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); + Assert.notNull(objectToSave, "Object to save must not be null"); return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } @@ -1375,8 +1471,8 @@ public T save(T objectToSave) { @SuppressWarnings("unchecked") public T save(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); ensureNotCollectionLike(objectToSave); AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); @@ -1384,7 +1480,6 @@ public T save(T objectToSave, String collectionName) { return source.isVersionedEntity() // ? doSaveVersioned(source, collectionName) // : (T) doSave(collectionName, objectToSave, this.mongoConverter); - } @SuppressWarnings("unchecked") @@ -1420,7 +1515,7 @@ private T doSaveVersioned(AdaptibleEntity source, String collectionName) if (result.getModifiedCount() == 0) { throw new OptimisticLockingFailureException( - String.format("Cannot save entity %s with version %s to collection %s. 
Has it been modified meanwhile?", + String.format("Cannot save entity %s with version %s to collection %s; Has it been modified meanwhile", source.getId(), source.getVersion(), collectionName)); } maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName)); @@ -1453,21 +1548,25 @@ protected T doSave(String collectionName, T objectToSave, MongoWriter wri protected Object insertDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting Document containing fields: {} in collection: {}", document.keySet(), collectionName); + LOGGER.debug(String.format("Inserting Document containing fields: %s in collection: %s", document.keySet(), + collectionName)); } + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(entityClass); + return execute(collectionName, collection -> { MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, - document, null); + mappedDocument.getDocument(), null); WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); if (writeConcernToUse == null) { - collection.insertOne(document); + collection.insertOne(mappedDocument.getDocument()); } else { - collection.withWriteConcern(writeConcernToUse).insertOne(document); + collection.withWriteConcern(writeConcernToUse).insertOne(mappedDocument.getDocument()); } - return operations.forEntity(document).getId(); + return operations.forEntity(mappedDocument.getDocument()).getId(); }); } @@ -1478,7 +1577,7 @@ protected List insertDocumentList(String collectionName, List } if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting list of Documents containing {} items", documents.size()); + LOGGER.debug(String.format("Inserting list of Documents containing %s items", documents.size())); } execute(collectionName, collection -> { @@ -1502,7 +1601,7 @@ protected List insertDocumentList(String 
collectionName, List protected Object saveDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: {}", dbDoc.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", dbDoc.keySet())); } return execute(collectionName, collection -> { @@ -1518,15 +1617,15 @@ protected Object saveDocument(String collectionName, Document dbDoc, Class en : collection.withWriteConcern(writeConcernToUse); if (!mapped.hasId()) { - collectionToUse.insertOne(dbDoc); + + mapped = queryOperations.createInsertContext(mapped).prepareId(mappingContext.getPersistentEntity(entityClass)); + collectionToUse.insertOne(mapped.getDocument()); } else { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); Document replacement = updateContext.getMappedUpdate(entity); - - Document filter = updateContext.getMappedQuery(entity); - + Document filter = updateContext.getReplacementQuery(); if (updateContext.requiresShardKey(filter, entity)) { if (entity.getShardKey().isImmutable()) { @@ -1537,7 +1636,7 @@ protected Object saveDocument(String collectionName, Document dbDoc, Class en } } - collectionToUse.replaceOne(filter, replacement, new ReplaceOptions().upsert(true)); + collectionToUse.replaceOne(filter, replacement, new com.mongodb.client.model.ReplaceOptions().upsert(true)); } return mapped.getId(); }); @@ -1556,7 +1655,7 @@ public UpdateResult upsert(Query query, UpdateDefinition update, String collecti @Override public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, true, false); } @@ -1574,7 +1673,7 @@ public UpdateResult updateFirst(Query 
query, UpdateDefinition update, String col @Override public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, false); } @@ -1592,7 +1691,7 @@ public UpdateResult updateMulti(Query query, UpdateDefinition update, String col @Override public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, true); } @@ -1601,15 +1700,9 @@ public UpdateResult updateMulti(Query query, UpdateDefinition update, Class e protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefinition update, @Nullable Class entityClass, boolean upsert, boolean multi) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); - - if (query.isSorted() && LOGGER.isWarnEnabled()) { - - LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.", - upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject())); - } + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); MongoPersistentEntity entity = entityClass == null ? 
null : getPersistentEntity(entityClass); @@ -1618,7 +1711,7 @@ protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefini updateContext.increaseVersionForUpdateIfNecessary(entity); Document queryObj = updateContext.getMappedQuery(entity); - UpdateOptions opts = updateContext.getUpdateOptions(entityClass); + UpdateOptions opts = updateContext.getUpdateOptions(entityClass, query); if (updateContext.isAggregationUpdate()) { @@ -1630,8 +1723,8 @@ protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefini return execute(collectionName, collection -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", - serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); } collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; @@ -1648,8 +1741,8 @@ protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefini return execute(collectionName, collection -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", serializeToJsonSafely(queryObj), - serializeToJsonSafely(updateObj), collectionName); + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); } collection = writeConcernToUse != null ? 
collection.withWriteConcern(writeConcernToUse) : collection; @@ -1668,7 +1761,7 @@ protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefini } } - ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); return collection.replaceOne(filter, updateObj, replaceOptions); } else { return multi ? collection.updateMany(queryObj, updateObj, opts) @@ -1680,7 +1773,7 @@ protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefini @Override public DeleteResult remove(Object object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return remove(object, getCollectionName(object.getClass())); } @@ -1688,8 +1781,8 @@ public DeleteResult remove(Object object) { @Override public DeleteResult remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); Query query = operations.forEntity(object).getRemoveByQuery(); @@ -1709,7 +1802,7 @@ public DeleteResult remove(Query query, Class entityClass) { @Override public DeleteResult remove(Query query, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doRemove(collectionName, query, entityClass, true); } @@ -1717,8 +1810,8 @@ public DeleteResult remove(Query query, Class entityClass, String collectionN protected DeleteResult doRemove(String collectionName, Query query, @Nullable Class entityClass, boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or 
empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); MongoPersistentEntity entity = getPersistentEntity(entityClass); @@ -1739,8 +1832,8 @@ protected DeleteResult doRemove(String collectionName, Query query, @Nullabl Document removeQuery = queryObject; if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(removeQuery), collectionName }); + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); } if (query.getLimit() > 0 || query.getSkip() > 0) { @@ -1778,7 +1871,7 @@ public List findAll(Class entityClass) { @Override public List findAll(Class entityClass, String collectionName) { return executeFindMultiInternal( - new FindCallback(new Document(), new Document(), + new FindCallback(CollectionPreparer.identity(), new Document(), new Document(), operations.forType(entityClass).getCollation().map(Collation::toMongoCollation).orElse(null)), CursorPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); @@ -1787,8 +1880,8 @@ public List findAll(Class entityClass, String collectionName) { @Override public MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, - new MapReduceOptions().outputTypeInline(), entityClass); + return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), + entityClass); } @Override @@ -1800,8 +1893,7 @@ public MapReduceResults mapReduce(String inputCollectionName, String mapF @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(query, inputCollectionName, 
mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(), - entityClass); + return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), entityClass); } @Override @@ -1823,19 +1915,23 @@ public MapReduceResults mapReduce(Query query, String inputCollectionName * @param resultType * @return * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated public List mapReduce(Query query, Class domainType, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class resultType) { - Assert.notNull(domainType, "Domain type must not be null!"); - Assert.notNull(inputCollectionName, "Input collection name must not be null!"); - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(mapFunction, "Map function must not be null!"); - Assert.notNull(reduceFunction, "Reduce function must not be null!"); + Assert.notNull(domainType, "Domain type must not be null"); + Assert.notNull(inputCollectionName, "Input collection name must not be null"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); String mapFunc = replaceWithResourceIfNecessary(mapFunction); String reduceFunc = replaceWithResourceIfNecessary(reduceFunction); - MongoCollection inputCollection = getAndPrepareCollection(doGetDatabase(), inputCollectionName); + CollectionPreparerDelegate readPreference = createDelegate(query); + MongoCollection inputCollection = readPreference + .prepare(getAndPrepareCollection(doGetDatabase(), inputCollectionName)); // MapReduceOp MapReduceIterable mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class); @@ -1843,13 +1939,13 @@ public List mapReduce(Query query, Class domainType, String inputColle if (query.getLimit() > 0 && mapReduceOptions 
!= null && mapReduceOptions.getLimit() == null) { mapReduce = mapReduce.limit(query.getLimit()); } - if (query.getMeta().getMaxTimeMsec() != null) { + if (query.getMeta().hasMaxTime()) { mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); } Document mappedSort = getMappedSortObject(query, domainType); if (mappedSort != null && !mappedSort.isEmpty()) { - mapReduce = mapReduce.sort(getMappedSortObject(query, domainType)); + mapReduce = mapReduce.sort(mappedSort); } mapReduce = mapReduce @@ -1861,7 +1957,7 @@ public List mapReduce(Query query, Class domainType, String inputColle Optionals.ifAllPresent(collation, mapReduceOptions.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); }); if (mapReduceOptions.getCollation().isPresent()) { @@ -1885,7 +1981,8 @@ public List mapReduce(Query query, Class domainType, String inputColle } if (mapReduceOptions.getOutputSharded().isPresent()) { - mapReduce = mapReduce.sharded(mapReduceOptions.getOutputSharded().get()); + MongoCompatibilityAdapter.mapReduceIterableAdapter(mapReduce) + .sharded(mapReduceOptions.getOutputSharded().get()); } if (StringUtils.hasText(mapReduceOptions.getOutputCollection()) && !mapReduceOptions.usesInlineOutput()) { @@ -1915,169 +2012,116 @@ public List mapReduce(Query query, Class domainType, String inputColle return mappedResults; } - public GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass) { - return group(null, inputCollectionName, groupBy, entityClass); - } + @Override + public AggregationResults aggregate(TypedAggregation aggregation, Class outputType) { - public GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass) { + 
Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); + } - Document document = groupBy.getGroupByObject(); - document.put("ns", inputCollectionName); - - if (criteria == null) { - document.put("cond", null); - } else { - document.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), Optional.empty())); - } - // If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and - // convert to Document - - if (document.containsKey("initial")) { - Object initialObj = document.get("initial"); - if (initialObj instanceof String) { - String initialAsString = replaceWithResourceIfNecessary((String) initialObj); - document.put("initial", Document.parse(initialAsString)); - } - } - - if (document.containsKey("$reduce")) { - document.put("$reduce", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("$reduce")))); - } - if (document.containsKey("$keyf")) { - document.put("$keyf", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("$keyf")))); - } - if (document.containsKey("finalize")) { - document.put("finalize", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("finalize")))); - } - - Document commandObject = new Document("group", document); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing Group with Document [{}]", serializeToJsonSafely(commandObject)); - } - - Document commandResult = executeCommand(commandObject, this.readPreference); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Group command result = [{}]", commandResult); - } - - @SuppressWarnings("unchecked") - Iterable resultSet = (Iterable) commandResult.get("retval"); - List mappedResults = new ArrayList<>(); - DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, entityClass, inputCollectionName); - - for (Document resultDocument : resultSet) { - 
mappedResults.add(callback.doWith(resultDocument)); - } - - return new GroupByResults<>(mappedResults, commandResult); - } - - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ - @Override - public AggregationResults aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); - } - - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - return aggregate(aggregation, inputCollectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, Class inputType, Class outputType) { + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); return aggregate(aggregation, getCollectionName(inputType), outputType, queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType) { return aggregate(aggregation, collectionName, outputType, null); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(TypedAggregation aggregation, String inputCollectionName, + public Stream aggregateStream(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregateStream(aggregation, inputCollectionName, outputType, context); + return aggregateStream(aggregation, inputCollectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType) { + public Stream aggregateStream(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); return aggregateStream(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); return aggregateStream(aggregation, getCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + 
queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { return aggregateStream(aggregation, collectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public List findAllAndRemove(Query query, String collectionName) { return (List) findAllAndRemove(query, Object.class, collectionName); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List findAllAndRemove(Query query, Class entityClass) { return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } + @Override + public UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected UpdateResult replace(Query query, Class entityType, T replacement, 
ReplaceOptions options, + String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use ReplaceOptions#none() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document mappedReplacement = updateContext.getMappedUpdate(mappingContext.getPersistentEntity(entityType)); + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + replacement = maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedReplacement, updateContext.getQueryObject()); + + UpdateResult result = doReplace(options, entityType, collectionName, updateContext, + createCollectionPreparer(query, action), mappedReplacement); + + if (result.wasAcknowledged()) { + + maybeEmitEvent(new AfterSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallAfterSave(replacement, mappedReplacement, collectionName); + } + + return result; + } + /** * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is @@ -2095,6 +2139,9 @@ protected List doFindAndDelete(String collectionName, Query query, Class< if (!CollectionUtils.isEmpty(result)) { 
Query byIdInQuery = operations.getByIdInQuery(result); + if (query.hasReadPreference()) { + byIdInQuery.withReadPreference(query.getReadPreference()); + } remove(byIdInQuery, entityClass, collectionName); } @@ -2105,9 +2152,9 @@ protected List doFindAndDelete(String collectionName, Query query, Class< protected AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType, @Nullable AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.notNull(outputType, "Output type must not be null"); return doAggregate(aggregation, collectionName, outputType, queryOperations.createAggregation(aggregation, context)); @@ -2132,7 +2179,7 @@ protected AggregationResults doAggregate(Aggregation aggregation, String Document command = aggregationUtil.createCommand(collectionName, aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); + LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command))); } Document commandResult = executeCommand(command); @@ -2143,13 +2190,14 @@ protected AggregationResults doAggregate(Aggregation aggregation, String List pipeline = aggregationUtil.createPipeline(aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Executing aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } return execute(collectionName, collection -> { List rawResult = new ArrayList<>(); - + CollectionPreparerDelegate 
delegate = CollectionPreparerDelegate.of(options); Class domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType() : null; @@ -2157,16 +2205,22 @@ protected AggregationResults doAggregate(Aggregation aggregation, String () -> operations.forType(domainType) // .getCollation()); - AggregateIterable aggregateIterable = collection.aggregate(pipeline, Document.class) // - .collation(collation.map(Collation::toMongoCollation).orElse(null)) // - .allowDiskUse(options.isAllowDiskUse()); + AggregateIterable aggregateIterable = delegate.prepare(collection).aggregate(pipeline, Document.class) // + .collation(collation.map(Collation::toMongoCollation).orElse(null)); + + if (options.isAllowDiskUseSet()) { + aggregateIterable = aggregateIterable.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { aggregateIterable = aggregateIterable.batchSize(options.getCursorBatchSize()); } options.getComment().ifPresent(aggregateIterable::comment); - options.getHint().ifPresent(aggregateIterable::hint); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + aggregateIterable = hintFunction.apply(mongoDbFactory, aggregateIterable::hintString, aggregateIterable::hint); + } if (options.hasExecutionTimeLimit()) { aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); @@ -2195,13 +2249,13 @@ protected AggregationResults doAggregate(Aggregation aggregation, String } @SuppressWarnings("ConstantConditions") - protected CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, - Class outputType, @Nullable AggregationOperationContext context) { + protected Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType, + @Nullable AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or 
empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); - Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); + Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming"); AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context); @@ -2209,24 +2263,37 @@ protected CloseableIterator aggregateStream(Aggregation aggregation, Stri List pipeline = aggregationDefinition.getAggregationPipeline(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, (CollectionCallback>) collection -> { + return execute(collectionName, (CollectionCallback>) collection -> { - AggregateIterable cursor = collection.aggregate(pipeline, Document.class) // - .allowDiskUse(options.isAllowDiskUse()); + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); + + AggregateIterable cursor = delegate.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { cursor = cursor.batchSize(options.getCursorBatchSize()); } options.getComment().ifPresent(cursor::comment); - options.getHint().ifPresent(cursor::hint); + HintFunction hintFunction = 
options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (options.getHintObject().isPresent()) { + cursor = hintFunction.apply(mongoDbFactory, cursor::hintString, cursor::hint); + } + + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } - Class domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType() + Class domainType = aggregation instanceof TypedAggregation typedAggregation ? typedAggregation.getInputType() : null; Optionals.firstNonEmpty(options::getCollation, // @@ -2234,59 +2301,35 @@ protected CloseableIterator aggregateStream(Aggregation aggregation, Stri .map(Collation::toMongoCollation) // .ifPresent(cursor::collation); - return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback); + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback).stream(); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override public ExecutableFind query(Class domainType) { return new ExecutableFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override public ExecutableUpdate update(Class domainType) { return new ExecutableUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override public ExecutableRemove remove(Class domainType) { return new ExecutableRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { return new 
ExecutableAggregationOperationSupport(this).aggregateAndReturn(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableMapReduce mapReduce(Class domainType) { return new ExecutableMapReduceOperationSupport(this).mapReduce(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#insert(java.lang.Class) - */ @Override public ExecutableInsert insert(Class domainType) { return new ExecutableInsertOperationSupport(this).insert(domainType); @@ -2294,14 +2337,12 @@ public ExecutableInsert insert(Class domainType) { protected String replaceWithResourceIfNecessary(String function) { - String func = function; - if (this.resourceLoader != null && ResourceUtils.isUrl(function)) { - Resource functionResource = resourceLoader.getResource(func); + Resource functionResource = resourceLoader.getResource(function); if (!functionResource.exists()) { - throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found!", function)); + throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found", function)); } Scanner scanner = null; @@ -2310,7 +2351,7 @@ protected String replaceWithResourceIfNecessary(String function) { scanner = new Scanner(functionResource.getInputStream()); return scanner.useDelimiter("\\A").next(); } catch (IOException e) { - throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s!", function), e); + throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s", function), e); } finally { if (scanner != null) { scanner.close(); @@ -2318,18 +2359,15 @@ protected String replaceWithResourceIfNecessary(String function) { } } - return func; + return function; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollectionNames() - */ + 
@Override @SuppressWarnings("ConstantConditions") public Set getCollectionNames() { return execute(db -> { Set result = new LinkedHashSet<>(); - for (String name : db.listCollectionNames()) { + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { result.add(name); } return result; @@ -2349,11 +2387,7 @@ protected MongoDatabase prepareDatabase(MongoDatabase database) { } protected , T> E maybeEmitEvent(E event) { - - if (eventPublisher != null) { - eventPublisher.publishEvent(event); - } - + eventDelegate.publishEvent(event); return event; } @@ -2402,50 +2436,82 @@ protected T maybeCallAfterConvert(T object, Document document, String collec */ @SuppressWarnings("ConstantConditions") protected MongoCollection doCreateCollection(String collectionName, Document collectionOptions) { + return doCreateCollection(collectionName, getCreateCollectionOptions(collectionOptions)); + } + + /** + * Create the specified collection using the provided options + * + * @param collectionName + * @param collectionOptions + * @return the collection that was created + * @since 3.3.3 + */ + @SuppressWarnings("ConstantConditions") + protected MongoCollection doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { + return execute(db -> { - CreateCollectionOptions co = new CreateCollectionOptions(); + db.createCollection(collectionName, collectionOptions); - if (collectionOptions.containsKey("capped")) { - co.capped((Boolean) collectionOptions.get("capped")); - } - if (collectionOptions.containsKey("size")) { - co.sizeInBytes(((Number) collectionOptions.get("size")).longValue()); - } - if (collectionOptions.containsKey("max")) { - co.maxDocuments(((Number) collectionOptions.get("max")).longValue()); - } + MongoCollection coll = db.getCollection(collectionName, Document.class); - if (collectionOptions.containsKey("collation")) { - co.collation(IndexConverters.fromDocument(collectionOptions.get("collation", 
Document.class))); + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Created collection [%s]", + coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName)); } + return coll; + }); + } - if (collectionOptions.containsKey("validator")) { + private CreateCollectionOptions getCreateCollectionOptions(Document document) { - com.mongodb.client.model.ValidationOptions options = new com.mongodb.client.model.ValidationOptions(); + CreateCollectionOptions options = new CreateCollectionOptions(); - if (collectionOptions.containsKey("validationLevel")) { - options.validationLevel(ValidationLevel.fromString(collectionOptions.getString("validationLevel"))); - } - if (collectionOptions.containsKey("validationAction")) { - options.validationAction(ValidationAction.fromString(collectionOptions.getString("validationAction"))); - } + if (document.containsKey("capped")) { + options.capped((Boolean) document.get("capped")); + } + if (document.containsKey("size")) { + options.sizeInBytes(((Number) document.get("size")).longValue()); + } + if (document.containsKey("max")) { + options.maxDocuments(((Number) document.get("max")).longValue()); + } + + if (document.containsKey("collation")) { + options.collation(IndexConverters.fromDocument(document.get("collation", Document.class))); + } + + if (document.containsKey("validator")) { + + ValidationOptions validation = new ValidationOptions(); - options.validator(collectionOptions.get("validator", Document.class)); - co.validationOptions(options); + if (document.containsKey("validationLevel")) { + validation.validationLevel(ValidationLevel.fromString(document.getString("validationLevel"))); + } + if (document.containsKey("validationAction")) { + validation.validationAction(ValidationAction.fromString(document.getString("validationAction"))); } - db.createCollection(collectionName, co); + validation.validator(document.get("validator", Document.class)); + 
options.validationOptions(validation); + } - MongoCollection coll = db.getCollection(collectionName, Document.class); + if (document.containsKey("timeseries")) { - // TODO: Emit a collection created event - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", - coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName); + Document timeSeries = document.get("timeseries", Document.class); + TimeSeriesOptions timeseries = new TimeSeriesOptions(timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { + timeseries.metaField(timeSeries.getString("metaField")); } - return coll; - }); + if (timeSeries.containsKey("granularity")) { + timeseries.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + options.timeSeriesOptions(timeseries); + } + return options; } /** @@ -2453,13 +2519,16 @@ protected MongoCollection doCreateCollection(String collectionName, Do * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @return the {@link List} of converted objects. + * @return the converted object or {@literal null} if none exists. 
*/ - protected T doFindOne(String collectionName, Document query, Document fields, Class entityClass) { - return doFindOne(collectionName, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass); + @Nullable + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFindOne(collectionName, collectionPreparer, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass); } /** @@ -2467,29 +2536,31 @@ protected T doFindOne(String collectionName, Document query, Document fields * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. - * @param entityClass the parameterized type of the returned list. * @param preparer the preparer used to modify the cursor on execution. - * @return the {@link List} of converted objects. + * @param entityClass the parameterized type of the returned list. + * @return the converted object or {@literal null} if none exists. 
* @since 2.2 */ + @Nullable @SuppressWarnings("ConstantConditions") - protected T doFindOne(String collectionName, Document query, Document fields, CursorPreparer preparer, - Class entityClass) { + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, CursorPreparer preparer, Class entityClass) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); - Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query), - mappedFields, entityClass, collectionName); + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, preparer), + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } @@ -2498,13 +2569,15 @@ protected T doFindOne(String collectionName, Document query, Document fields * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. 
* @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } @@ -2514,6 +2587,7 @@ protected List doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. @@ -2521,27 +2595,31 @@ protected List doFind(String collectionName, Document query, Document fie * (apply limits, skips and so on). * @return the {@link List} of converted objects. 
*/ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - CursorPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, CursorPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { + protected List doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); - Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp ? 
getMappedSortObject(sqcp.getSortObject(), entity) : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null), + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, objectCallback, collectionName); } @@ -2551,30 +2629,33 @@ protected List doFind(String collectionName, Document query, Document * * @since 2.0 */ - List doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, CursorPreparer preparer) { + List doFind(CollectionPreparer> collectionPreparer, String collectionName, + Document query, Document fields, Class sourceClass, Class targetClass, CursorPreparer preparer) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); - Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, projection); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp + ? 
getMappedSortObject(sqcp.getSortObject(), entity) + : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), sourceClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); } /** * Convert given {@link CollectionOptions} to a document and take the domain type information into account when * creating a mapped schema for validation.
    - * This method calls {@link #convertToDocument(CollectionOptions)} for backwards compatibility and potentially - * overwrites the validator with the mapped validator document. In the long run - * {@link #convertToDocument(CollectionOptions)} will be removed so that this one becomes the only source of truth. * * @param collectionOptions can be {@literal null}. * @param targetType must not be {@literal null}. Use {@link Object} type instead. @@ -2583,43 +2664,41 @@ List doFind(String collectionName, Document query, Document fields, Cl */ protected Document convertToDocument(@Nullable CollectionOptions collectionOptions, Class targetType) { - Document doc = convertToDocument(collectionOptions); - - if (collectionOptions != null) { - - collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // - .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); + if (collectionOptions == null) { + return new Document(); } - return doc; - } + Document doc = new Document(); + collectionOptions.getCapped().ifPresent(val -> doc.put("capped", val)); + collectionOptions.getSize().ifPresent(val -> doc.put("size", val)); + collectionOptions.getMaxDocuments().ifPresent(val -> doc.put("max", val)); + collectionOptions.getCollation().ifPresent(val -> doc.append("collation", val.toDocument())); - /** - * @param collectionOptions can be {@literal null}. - * @return never {@literal null}. - * @deprecated since 2.1 in favor of {@link #convertToDocument(CollectionOptions, Class)}. 
- */ - @Deprecated - protected Document convertToDocument(@Nullable CollectionOptions collectionOptions) { - - Document document = new Document(); + collectionOptions.getValidationOptions().ifPresent(it -> { - if (collectionOptions != null) { + it.getValidationLevel().ifPresent(val -> doc.append("validationLevel", val.getValue())); + it.getValidationAction().ifPresent(val -> doc.append("validationAction", val.getValue())); + it.getValidator().ifPresent(val -> doc.append("validator", getMappedValidator(val, targetType))); + }); - collectionOptions.getCapped().ifPresent(val -> document.put("capped", val)); - collectionOptions.getSize().ifPresent(val -> document.put("size", val)); - collectionOptions.getMaxDocuments().ifPresent(val -> document.put("max", val)); - collectionOptions.getCollation().ifPresent(val -> document.append("collation", val.toDocument())); + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { - collectionOptions.getValidationOptions().ifPresent(it -> { + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); - it.getValidationLevel().ifPresent(val -> document.append("validationLevel", val.getValue())); - it.getValidationAction().ifPresent(val -> document.append("validationAction", val.getValue())); - it.getValidator().ifPresent(val -> document.append("validator", getMappedValidator(val, Object.class))); - }); - } + collectionOptions.getChangeStreamOptions().map(it -> new Document("enabled", it.getPreAndPostImages())) + .ifPresent(it -> { + doc.put("changeStreamPreAndPostImages", it); + }); - return document; + return doc; } Document getMappedValidator(Validator validator, Class 
domainType) { @@ -2635,8 +2714,7 @@ Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

    + * The first document that matches the query is returned and also removed from the collection in the database.
    * The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -2645,28 +2723,25 @@ Document getMappedValidator(Validator validator, Class domainType) { * @return the List of converted objects. */ @SuppressWarnings("ConstantConditions") - protected T doFindAndRemove(String collectionName, Document query, Document fields, Document sort, - @Nullable Collation collation, Class entityClass) { - - EntityReader readerToUse = this.mongoConverter; + protected T doFindAndRemove(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName); + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), - new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } @SuppressWarnings("ConstantConditions") - protected T doFindAndModify(String collectionName, Document query, Document fields, Document sort, - Class entityClass, UpdateDefinition update, @Nullable FindAndModifyOptions options) { - - EntityReader readerToUse = 
this.mongoConverter; + protected T doFindAndModify(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, Class entityClass, UpdateDefinition update, + @Nullable FindAndModifyOptions options) { if (options == null) { options = new FindAndModifyOptions(); @@ -2682,16 +2757,16 @@ protected T doFindAndModify(String collectionName, Document query, Document : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), - collectionName); + LOGGER.debug(String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s in collection: %s", + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } return executeFindOneInternal( - new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), - new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName); + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** @@ -2710,20 +2785,82 @@ protected T doFindAndModify(String collectionName, Document query, Document * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. 
*/ @Nullable - protected T doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields, - Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class entityType, - Document replacement, FindAndReplaceOptions options, Class resultType) { + protected T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, Class resultType) { + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); + } + + CollectionPreparerDelegate createDelegate(Query query) { + return CollectionPreparerDelegate.of(query); + } + + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createDelegate(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. 
+ * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + @Nullable + private T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, EntityProjection projection) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), serializeToJsonSafely(mappedSort), - entityType, serializeToJsonSafely(replacement), collectionName); + LOGGER + .debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), + serializeToJsonSafely(mappedSort), entityType, serializeToJsonSafely(replacement), collectionName)); } - return executeFindOneInternal( - new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options), - new ProjectingReadCallback<>(mongoConverter, entityType, resultType, collectionName), collectionName); + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, mappedSort, + replacement, collation, options), new ProjectingReadCallback<>(mongoConverter, projection, collectionName), + collectionName); + } + + private UpdateResult doReplace(ReplaceOptions options, Class entityType, String collectionName, + UpdateContext updateContext, CollectionPreparer> collectionPreparer, + Document replacement) { + + MongoPersistentEntity persistentEntity = 
mappingContext.getPersistentEntity(entityType); + + ReplaceCallback replaceCallback = new ReplaceCallback(collectionPreparer, + updateContext.getMappedQuery(persistentEntity), replacement, updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("replace one using query: %s for class: %s in collection: %s", + serializeToJsonSafely(updateContext.getMappedQuery(persistentEntity)), entityType, collectionName)); + } + + return execute(collectionName, replaceCallback); } /** @@ -2802,7 +2939,8 @@ private List executeFindMultiInternal(CollectionCallback result = new ArrayList<>(); + int available = cursor.available(); + List result = available > 0 ? new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { Document object = cursor.next(); @@ -2823,9 +2961,9 @@ private void executeQueryInternal(CollectionCallback> col .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) .iterator()) { - while (cursor.hasNext()) { - callbackHandler.processDocument(cursor.next()); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); + } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -2857,13 +2995,29 @@ private static MongoConverter getDefaultMongoConverter(MongoDatabaseFactory fact return converter; } - private Document getMappedSortObject(Query query, Class type) { + @Nullable + private Document getMappedSortObject(@Nullable Query query, Class type) { - if (query == null || ObjectUtils.isEmpty(query.getSortObject())) { + if (query == null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); + } + + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { + return 
getMappedSortObject(sortObject, mappingContext.getPersistentEntity(type)); + } + + @Nullable + private Document getMappedSortObject(Document sortObject, @Nullable MongoPersistentEntity entity) { + + if (ObjectUtils.isEmpty(sortObject)) { + return null; + } + + return queryMapper.getMappedSort(sortObject, entity); } /** @@ -2892,12 +3046,15 @@ static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, */ private static class FindOneCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; private final CursorPreparer cursorPreparer; - FindOneCallback(Document query, Document fields, CursorPreparer preparer) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, Document fields, + CursorPreparer preparer) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.of(fields).filter(it -> !ObjectUtils.isEmpty(fields)); this.cursorPreparer = preparer; @@ -2906,14 +3063,8 @@ private static class FindOneCallback implements CollectionCallback { @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindIterable iterable = cursorPreparer.initiateFind(collection, col -> col.find(query, Document.class)); - - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), - collection.getNamespace() != null ? 
collection.getNamespace().getFullName() : "n/a"); - } + FindIterable iterable = cursorPreparer.initiateFind(collection, + col -> collectionPreparer.prepare(col).find(query, Document.class)); if (fields.isPresent()) { iterable = iterable.projection(fields.get()); @@ -2933,24 +3084,29 @@ public Document doInCollection(MongoCollection collection) throws Mong */ private static class FindCallback implements CollectionCallback> { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final @Nullable com.mongodb.client.model.Collation collation; - public FindCallback(Document query, Document fields, @Nullable com.mongodb.client.model.Collation collation) { + public FindCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, @Nullable com.mongodb.client.model.Collation collation) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(fields, "Fields must not be null"); + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.collation = collation; } + @Override public FindIterable doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindIterable findIterable = collection.find(query, Document.class).projection(fields); + FindIterable findIterable = collectionPreparer.prepare(collection).find(query, Document.class) + .projection(fields); if (collation != null) { findIterable = findIterable.collation(collation); @@ -2968,11 +3124,14 @@ public FindIterable doInCollection(MongoCollection collectio */ private class ExistsCallback implements CollectionCallback { + private final CollectionPreparer collectionPreparer; private final Document mappedQuery; private final com.mongodb.client.model.Collation collation; - ExistsCallback(Document mappedQuery, com.mongodb.client.model.Collation 
collation) { + ExistsCallback(CollectionPreparer collectionPreparer, Document mappedQuery, + com.mongodb.client.model.Collation collation) { + this.collectionPreparer = collectionPreparer; this.mappedQuery = mappedQuery; this.collation = collation; } @@ -2980,7 +3139,7 @@ private class ExistsCallback implements CollectionCallback { @Override public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - return doCount(collection.getNamespace().getCollectionName(), mappedQuery, + return doCount(collectionPreparer, collection.getNamespace().getCollectionName(), mappedQuery, new CountOptions().limit(1).collation(collation)) > 0; } } @@ -2993,12 +3152,15 @@ public Boolean doInCollection(MongoCollection collection) throws Mongo */ private static class FindAndRemoveCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Optional collation; - FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; @@ -3006,17 +3168,19 @@ private static class FindAndRemoveCallback implements CollectionCallback collection) throws MongoException, DataAccessException { FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields); collation.map(Collation::toMongoCollation).ifPresent(opts::collation); - return collection.findOneAndDelete(query, opts); + return collectionPreparer.prepare(collection).findOneAndDelete(query, opts); } } private static class FindAndModifyCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private 
final Document fields; private final Document sort; @@ -3024,9 +3188,10 @@ private static class FindAndModifyCallback implements CollectionCallback arrayFilters; private final FindAndModifyOptions options; - FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List arrayFilters, - FindAndModifyOptions options) { + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -3035,6 +3200,7 @@ private static class FindAndModifyCallback implements CollectionCallback collection) throws MongoException, DataAccessException { FindOneAndUpdateOptions opts = new FindOneAndUpdateOptions(); @@ -3053,10 +3219,10 @@ public Document doInCollection(MongoCollection collection) throws Mong opts.arrayFilters(arrayFilters); } - if (update instanceof Document) { - return collection.findOneAndUpdate(query, (Document) update, opts); + if (update instanceof Document document) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, document, opts); } else if (update instanceof List) { - return collection.findOneAndUpdate(query, (List) update, opts); + return collectionPreparer.prepare(collection).findOneAndUpdate(query, (List) update, opts); } throw new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update)); @@ -3072,6 +3238,7 @@ public Document doInCollection(MongoCollection collection) throws Mong */ private static class FindAndReplaceCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; @@ -3079,9 +3246,10 @@ private static class FindAndReplaceCallback implements CollectionCallback> collectionPreparer, Document query, + Document fields, 
Document sort, Document update, @Nullable com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -3090,10 +3258,6 @@ private static class FindAndReplaceCallback implements CollectionCallback collection) throws MongoException, DataAccessException { @@ -3110,7 +3274,7 @@ public Document doInCollection(MongoCollection collection) throws Mong opts.returnDocument(ReturnDocument.AFTER); } - return collection.findOneAndReplace(query, update, opts); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, opts); } } @@ -3121,7 +3285,7 @@ public Document doInCollection(MongoCollection collection) throws Mong * @author Thomas Darimont */ - interface DocumentCallback { + protected interface DocumentCallback { T doWith(Document object); } @@ -3147,19 +3311,20 @@ private class ReadDocumentCallback implements DocumentCallback { this.collectionName = collectionName; } + @Override public T doWith(Document document) { - maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - T entity = reader.read(type, document); + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); - if (entity == null) { - throw new MappingException(String.format("EntityReader %s returned null", reader)); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } - maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); - entity = maybeCallAfterConvert(entity, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); - return entity; + return entity; } } @@ -3173,24 +3338,18 @@ public T doWith(Document document) { */ private class ProjectingReadCallback implements DocumentCallback { - 
private final EntityReader reader; - private final Class entityType; - private final Class targetType; + private final MongoConverter mongoConverter; + private final EntityProjection projection; private final String collectionName; - ProjectingReadCallback(EntityReader reader, Class entityType, Class targetType, - String collectionName) { + ProjectingReadCallback(MongoConverter mongoConverter, EntityProjection projection, String collectionName) { - this.reader = reader; - this.entityType = entityType; - this.targetType = targetType; + this.mongoConverter = mongoConverter; + this.projection = projection; this.collectionName = collectionName; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document) - */ + @Override @SuppressWarnings("unchecked") public T doWith(Document document) { @@ -3198,39 +3357,40 @@ public T doWith(Document document) { return null; } - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType - : targetType; + maybeEmitEvent(new AfterLoadEvent<>(document, projection.getMappedType().getType(), collectionName)); - maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName)); - - Object entity = reader.read(typeToRead, document); + Object entity = mongoConverter.project(projection, document); if (entity == null) { - throw new MappingException(String.format("EntityReader %s returned null", reader)); + throw new MappingException(String.format("EntityReader %s returned null", mongoConverter)); } - Object result = targetType.isInterface() ? 
projectionFactory.createProjection(targetType, entity) : entity; - - maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); - return (T) maybeCallAfterConvert(result, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return (T) maybeCallAfterConvert(entity, document, collectionName); } } - class QueryCursorPreparer implements CursorPreparer { + class QueryCursorPreparer implements SortingQueryCursorPreparer { private final Query query; + private final Document sortObject; + private final int limit; + private final long skip; private final @Nullable Class type; QueryCursorPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + QueryCursorPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.DBCursor) - */ + @Override public FindIterable prepare(FindIterable iterable) { FindIterable cursorToUse = iterable; @@ -3240,42 +3400,36 @@ public FindIterable prepare(FindIterable iterable) { .ifPresent(cursorToUse::collation); Meta meta = query.getMeta(); - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !meta.hasValues() && !query.getCollation().isPresent()) { + HintFunction hintFunction = HintFunction.from(query.getHint()); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues() + && query.getCollation().isEmpty()) { return cursorToUse; } try { - if (query.getSkip() > 0) { - cursorToUse = cursorToUse.skip((int) query.getSkip()); + if (skip > 0) { + cursorToUse = cursorToUse.skip((int) skip); } - if (query.getLimit() > 0) { - 
cursorToUse = cursorToUse.limit(query.getLimit()); + if (limit > 0) { + cursorToUse = cursorToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? getMappedSortObject(sortObject, type) : sortObject; cursorToUse = cursorToUse.sort(sort); } - if (StringUtils.hasText(query.getHint())) { - - String hint = query.getHint(); - - if (BsonUtils.isJsonDocument(hint)) { - cursorToUse = cursorToUse.hint(BsonUtils.parse(hint, mongoDbFactory)); - } else { - cursorToUse = cursorToUse.hintString(hint); - } + if (hintFunction.isPresent()) { + cursorToUse = hintFunction.apply(mongoDbFactory, cursorToUse::hintString, cursorToUse::hint); } if (meta.hasValues()) { - if (StringUtils.hasText(meta.getComment())) { - cursorToUse = cursorToUse.comment(meta.getComment()); + if (meta.hasComment()) { + cursorToUse = cursorToUse.comment(meta.getRequiredComment()); } - if (meta.getMaxTimeMsec() != null) { - cursorToUse = cursorToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + if (meta.hasMaxTime()) { + cursorToUse = cursorToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (meta.getCursorBatchSize() != null) { @@ -3297,7 +3451,6 @@ public FindIterable prepare(FindIterable iterable) { cursorToUse = cursorToUse.partial(true); break; case SECONDARY_READS: - case SLAVE_OK: break; default: throw new IllegalArgumentException(String.format("%s is no supported flag.", option)); @@ -3312,10 +3465,10 @@ public FindIterable prepare(FindIterable iterable) { return cursorToUse; } + @Nullable @Override - public ReadPreference getReadPreference() { - return (query.getMeta().getFlags().contains(CursorOption.SECONDARY_READS) - || query.getMeta().getFlags().contains(CursorOption.SLAVE_OK)) ? 
ReadPreference.primaryPreferred() : null; + public Document getSortObject() { + return sortObject; } } @@ -3342,13 +3495,14 @@ static class GeoNearResultDocumentCallback implements DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } + @Override public GeoResult doWith(Document object) { double distance = Double.NaN; @@ -3362,6 +3516,14 @@ public GeoResult doWith(Document object) { } } + /** + * @return the {@link MongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public MongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDbFactory; + } + /** * A {@link CloseableIterator} that is backed by a MongoDB {@link MongoCollection}. * @@ -3445,27 +3607,9 @@ public void close() { } } - /** - * @deprecated since 3.1.4. Use {@link #getMongoDatabaseFactory()} instead. - * @return the {@link MongoDatabaseFactory} in use. - */ - @Deprecated - public MongoDatabaseFactory getMongoDbFactory() { - return getMongoDatabaseFactory(); - } - - /** - * @return the {@link MongoDatabaseFactory} in use. - * @since 3.1.4 - */ - public MongoDatabaseFactory getMongoDatabaseFactory() { - return mongoDbFactory; - } - /** * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the - * server through the driver API. - *

    + * server through the driver API.
    * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * @@ -3483,16 +3627,12 @@ static class SessionBoundMongoTemplate extends MongoTemplate { */ SessionBoundMongoTemplate(ClientSession session, MongoTemplate that) { - super(that.getMongoDbFactory().withSession(session), that); + super(that.getMongoDatabaseFactory().withSession(session), that); this.delegate = that; this.session = session; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate#getCollection(java.lang.String) - */ @Override public MongoCollection getCollection(String collectionName) { @@ -3500,15 +3640,44 @@ public MongoCollection getCollection(String collectionName) { return delegate.getCollection(collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate#getDb() - */ @Override public MongoDatabase getDb() { // native MongoDB objects that offer methods with ClientSession must not be proxied. 
return delegate.getDb(); } + + @Override + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + return false; + } + } + + @FunctionalInterface + interface CountExecution { + long countDocuments(CollectionPreparer collectionPreparer, String collection, Document filter, + CountOptions options); + } + + private static class ReplaceCallback implements CollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document update; + private final com.mongodb.client.model.ReplaceOptions options; + + ReplaceCallback(CollectionPreparer> collectionPreparer, Document query, Document update, + com.mongodb.client.model.ReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.update = update; + this.options = options; + } + + @Override + public UpdateResult doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + return collectionPreparer.prepare(collection).replaceOne(query, update, options); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java index 5eb9f110b6..583b243aa8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,18 +16,19 @@ package org.springframework.data.mongodb.core; import org.bson.Document; -import org.springframework.data.mapping.SimplePropertyHandler; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.projection.ProjectionFactory; -import org.springframework.data.projection.ProjectionInformation; -import org.springframework.util.ClassUtils; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Predicates; /** * Common operations performed on properties of an entity like extracting fields information for projection creation. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ class PropertyOperations { @@ -40,37 +41,37 @@ class PropertyOperations { /** * For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for - * creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a + * creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a * {@literal closed interface projection}. * - * @param projectionFactory must not be {@literal null}. + * @param projection must not be {@literal null}. * @param fields must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param targetType must not be {@literal null}. * @return {@link Document} with fields to be included. 
*/ - Document computeFieldsForProjection(ProjectionFactory projectionFactory, Document fields, Class domainType, - Class targetType) { + Document computeMappedFieldsForProjection(EntityProjection projection, + Document fields) { - if (!fields.isEmpty() || ClassUtils.isAssignable(domainType, targetType)) { + if (!projection.isClosedProjection()) { return fields; } Document projectedFields = new Document(); - if (targetType.isInterface()) { - - ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType); - - if (projectionInformation.isClosed()) { - projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1)); - } + if (projection.getMappedType().getType().isInterface()) { + projection.forEach(it -> { + projectedFields.put(it.getPropertyPath().getSegment(), 1); + }); } else { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(targetType); - if (entity != null) { - entity.doWithProperties( - (SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1)); + // DTO projections use merged metadata between domain type and result type + PersistentPropertyTranslator translator = PersistentPropertyTranslator.create( + mappingContext.getRequiredPersistentEntity(projection.getDomainType()), + Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(projection.getMappedType()); + for (MongoPersistentProperty property : persistentEntity) { + projectedFields.put(translator.translate(property).getFieldName(), 1); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java index 1ec8fc9366..28ca85fbd7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java 
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; @@ -28,6 +29,8 @@ import org.bson.BsonValue; import org.bson.Document; import org.bson.codecs.Codec; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.PropertyReferenceException; import org.springframework.data.mapping.context.MappingContext; @@ -45,21 +48,22 @@ import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.ShardKey; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; import 
org.springframework.data.mongodb.util.BsonUtils; -import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; -import org.springframework.util.ObjectUtils; -import org.springframework.util.StringUtils; import com.mongodb.client.model.CountOptions; import com.mongodb.client.model.DeleteOptions; @@ -73,6 +77,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Florian Lüdiger * @since 3.0 */ class QueryOperations { @@ -107,6 +112,14 @@ class QueryOperations { this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext); } + InsertContext createInsertContext(Document source) { + return createInsertContext(MappedDocument.of(source)); + } + + InsertContext createInsertContext(MappedDocument mappedDocument) { + return new InsertContext(mappedDocument); + } + /** * Create a new {@link QueryContext} instance. * @@ -182,6 +195,15 @@ UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) { return new UpdateContext(replacement, upsert); } + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(Query query, MappedDocument replacement, boolean upsert) { + return new UpdateContext(query, replacement, upsert); + } + /** * Create a new {@link DeleteContext} instance removing all matching documents. * @@ -227,6 +249,57 @@ AggregationDefinition createAggregation(Aggregation aggregation, return new AggregationDefinition(aggregation, aggregationOperationContext); } + /** + * {@link InsertContext} encapsulates common tasks required to interact with {@link Document} to be inserted. 
+ * + * @since 3.4.3 + */ + class InsertContext { + + private final MappedDocument source; + + private InsertContext(MappedDocument source) { + this.source = source; + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param type must not be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + * @see #prepareId(MongoPersistentEntity) + */ + MappedDocument prepareId(Class type) { + return prepareId(mappingContext.getPersistentEntity(type)); + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param entity can be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + */ + MappedDocument prepareId(@Nullable MongoPersistentEntity entity) { + + if (entity == null || source.hasId()) { + return source; + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty != null + && (idProperty.hasExplicitWriteTarget() || idProperty.isAnnotationPresent(MongoId.class))) { + if (!ClassUtils.isAssignable(ObjectId.class, idProperty.getFieldType())) { + source.updateId(queryMapper.convertId(new ObjectId(), idProperty.getFieldType())); + } + } + return source; + } + } + /** * {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document * representation, mapping field names, as well as determining and applying {@link Collation collations}. 
@@ -288,45 +361,58 @@ Document getMappedQuery(@Nullable MongoPersistentEntity entity) { return queryMapper.getMappedObject(getQueryObject(), entity); } - Document getMappedFields(@Nullable MongoPersistentEntity entity, Class targetType, - ProjectionFactory projectionFactory) { + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { - Document fields = new Document(); + Document fields = evaluateFields(entity); - for (Entry entry : query.getFieldsObject().entrySet()) { + if (entity == null) { + return fields; + } - if (entry.getValue() instanceof MongoExpression) { + Document mappedFields; + if (!fields.isEmpty()) { + mappedFields = queryMapper.getMappedFields(fields, entity); + } else { + mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields); + mappedFields = queryMapper.addMetaAttributes(mappedFields, entity); + } - AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT - : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper); + if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName()) + && !query.getQueryObject().containsKey("$text")) { + mappedFields.remove(entity.getTextScoreProperty().getFieldName()); + } - fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx)); - } else { - fields.put(entry.getKey(), entry.getValue()); - } + if (mappedFields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; } - Document mappedFields = fields; + return mappedFields; + } - if (entity == null) { - return mappedFields; - } + private Document evaluateFields(@Nullable MongoPersistentEntity entity) { - Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields, - entity.getType(), targetType); + Document fields = query.getFieldsObject(); - if (ObjectUtils.nullSafeEquals(fields, projectedFields)) { - 
mappedFields = queryMapper.getMappedFields(projectedFields, entity); - } else { - mappedFields = queryMapper.getMappedFields(projectedFields, - mappingContext.getRequiredPersistentEntity(targetType)); + if (fields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; } - if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) { - mappedFields.remove(entity.getTextScoreProperty().getFieldName()); + Document evaluated = new Document(); + + for (Entry entry : fields.entrySet()) { + + if (entry.getValue() instanceof MongoExpression mongoExpression) { + + AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT + : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper); + + evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx)); + } else { + evaluated.put(entry.getKey(), entry.getValue()); + } } - return mappedFields; + return evaluated; } /** @@ -348,7 +434,7 @@ Document getMappedSort(@Nullable MongoPersistentEntity entity) { * @param consumer must not be {@literal null}. */ void applyCollation(@Nullable Class domainType, Consumer consumer) { - getCollation(domainType).ifPresent(consumer::accept); + getCollation(domainType).ifPresent(consumer); } /** @@ -364,6 +450,25 @@ Optional getCollation(@Nullable Class dom return entityOperations.forType(domainType).getCollation(query) // .map(Collation::toMongoCollation); } + + /** + * Get the {@link HintFunction} reading the actual hint form the {@link Query}. + * + * @return new instance of {@link HintFunction}. + * @since 4.2 + */ + HintFunction getHintFunction() { + return HintFunction.from(query.getHint()); + } + + /** + * Read and apply the hint from the {@link Query}. 
+ * + * @since 4.2 + */ + void applyHint(Function stringConsumer, Function bsonConsumer) { + getHintFunction().ifPresent(codecRegistryProvider, stringConsumer, bsonConsumer); + } } /** @@ -383,13 +488,12 @@ class DistinctQueryContext extends QueryContext { */ private DistinctQueryContext(@Nullable Object query, String fieldName) { - super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query); + super(query instanceof Document document ? new BasicQuery(document) : (Query) query); this.fieldName = fieldName; } @Override - Document getMappedFields(@Nullable MongoPersistentEntity entity, Class targetType, - ProjectionFactory projectionFactory) { + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { return getMappedFields(entity); } @@ -423,7 +527,7 @@ Class getDriverCompatibleClass(Class type) { } /** - * Get the most speficic read target type based on the user {@literal requestedTargetType} an the property type + * Get the most specific read target type based on the user {@literal requestedTargetType} an the property type * based on meta information extracted from the {@literal domainType}. * * @param requestedTargetType must not be {@literal null}. 
@@ -492,19 +596,29 @@ CountOptions getCountOptions(@Nullable Class domainType, @Nullable Consumer 0) { options.limit(query.getLimit()); } + if (query.getSkip() > 0) { options.skip((int) query.getSkip()); } - if (StringUtils.hasText(query.getHint())) { - String hint = query.getHint(); - if (BsonUtils.isJsonDocument(hint)) { - options.hint(BsonUtils.parse(hint, codecRegistryProvider)); - } else { - options.hintString(hint); + Meta meta = query.getMeta(); + if (meta.hasValues()) { + + if (meta.hasMaxTime()) { + options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.hasComment()) { + options.comment(meta.getComment()); } } + HintFunction hintFunction = HintFunction.from(query.getHint()); + + if (hintFunction.isPresent()) { + options = hintFunction.apply(codecRegistryProvider, options::hintString, options::hint); + } + if (callback != null) { callback.accept(options); } @@ -612,8 +726,12 @@ class UpdateContext extends QueryContext { } UpdateContext(MappedDocument update, boolean upsert) { + this(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())), update, upsert); + } - super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter())))); + UpdateContext(Query query, MappedDocument update, boolean upsert) { + + super(query); this.multi = false; this.upsert = upsert; this.mappedDocument = update; @@ -623,7 +741,7 @@ class UpdateContext extends QueryContext { /** * Get the {@link UpdateOptions} applicable for the {@link Query}. * - * @param domainType must not be {@literal null}. + * @param domainType can be {@literal null}. * @return never {@literal null}. */ UpdateOptions getUpdateOptions(@Nullable Class domainType) { @@ -634,11 +752,10 @@ UpdateOptions getUpdateOptions(@Nullable Class domainType) { * Get the {@link UpdateOptions} applicable for the {@link Query}. * * @param domainType can be {@literal null}. - * @param callback a callback to modify the generated options. Can be {@literal null}. 
- * @return + * @param query can be {@literal null} + * @return never {@literal null}. */ - UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Consumer callback) { - + UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Query query) { UpdateOptions options = new UpdateOptions(); options.upsert(upsert); @@ -647,12 +764,13 @@ UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Consumer .arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList())); } - applyCollation(domainType, options::collation); - - if (callback != null) { - callback.accept(options); + if (query != null && query.isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); } + HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint); + applyCollation(domainType, options::collation); + return options; } @@ -680,6 +798,10 @@ ReplaceOptions getReplaceOptions(@Nullable Class domainType, @Nullable Consum ReplaceOptions options = new ReplaceOptions(); options.collation(updateOptions.getCollation()); options.upsert(updateOptions.isUpsert()); + applyHint(options::hintString, options::hint); + if (!isMulti() && getQuery().isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } if (callback != null) { callback.accept(options); @@ -690,13 +812,23 @@ ReplaceOptions getReplaceOptions(@Nullable Class domainType, @Nullable Consum @Override Document getMappedQuery(@Nullable MongoPersistentEntity domainType) { + return applyIsolation(super.getMappedQuery(domainType)); + } - Document mappedQuery = super.getMappedQuery(domainType); + /** + * A replacement query that is derived from the already {@link MappedDocument}. 
+ * + * @return + */ + Document getReplacementQuery() { + return applyIsolation(getQueryObject()); + } - if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) { + private Document applyIsolation(Document mappedQuery) { + if (multi && update != null && update.isIsolated() && !mappedQuery.containsKey("$isolated")) { + mappedQuery = new Document(mappedQuery); mappedQuery.put("$isolated", 1); } - return mappedQuery; } @@ -707,7 +839,7 @@ Document applyShardKey(MongoPersistentEntity domainType, Document filter, Document filterWithShardKey = new Document(filter); getMappedShardKeyFields(domainType) - .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key))); + .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key))); return filterWithShardKey; } @@ -731,7 +863,7 @@ private boolean shardedById(MongoPersistentEntity domainType) { } String key = shardKey.getPropertyNames().iterator().next(); - if ("_id".equals(key)) { + if (FieldName.ID.name().equals(key)) { return true; } @@ -789,7 +921,7 @@ void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity pers if (persistentEntity != null && persistentEntity.hasVersionProperty()) { String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { + if (update != null && !update.modifies(versionFieldName)) { update.inc(versionFieldName); } } @@ -837,10 +969,10 @@ class AggregationDefinition { this.aggregation = aggregation; - if (aggregation instanceof TypedAggregation) { - this.inputType = ((TypedAggregation) aggregation).getInputType(); - } else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) { - this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType(); + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + 
} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) { + this.inputType = typeBasedAggregationOperationContext.getType(); } else { this.inputType = null; } @@ -865,8 +997,8 @@ class AggregationDefinition { this.aggregation = aggregation; - if (aggregation instanceof TypedAggregation) { - this.inputType = ((TypedAggregation) aggregation).getInputType(); + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); } else { this.inputType = inputType; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java index 279bb4fd52..54129e6b5d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java index 35c8052e72..954fd61716 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,19 +41,15 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio */ ReactiveAggregationOperationSupport(ReactiveMongoTemplate template) { - Assert.notNull(template, "Template must not be null!"); + Assert.notNull(template, "Template must not be null"); this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveAggregationSupport<>(template, domainType, null, null); } @@ -75,34 +71,22 @@ static class ReactiveAggregationSupport this.collection = collection; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationOperationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithAggregation#by(org.springframework.data.mongodb.core.Aggregation) - */ @Override public TerminatingAggregationOperation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveAggregationOperation.TerminatingAggregationOperation#all() - */ @Override public Flux all() { return template.aggregate(aggregation, getCollectionName(aggregation), domainType); @@ -114,9 +98,7 @@ private String getCollectionName(Aggregation aggregation) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { return template.getCollectionName(typedAggregation.getInputType()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java new file mode 100644 index 0000000000..7f88b63f28 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java @@ -0,0 +1,144 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling + * {@link #execute()}. + * + *

    + * ReactiveMongoOperations ops = …;
    + *
    + * ops.bulkOps(BulkMode.UNORDERED, Person.class)
    + * 				.insert(newPerson)
    + * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
    + * 				.execute();
    + * 
    + *

    + * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface ReactiveBulkOperations { + + /** + * Add a single insert to the bulk operation. + * + * @param documents the document to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(Object documents); + + /** + * Add a list of inserts to the bulk operation. + * + * @param documents List of documents to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(List documents); + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations updateOne(Query query, UpdateDefinition update); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. 
+ */ + ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update); + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations upsert(Query query, UpdateDefinition update); + + /** + * Add a single remove operation to the bulk operation. + * + * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(Query remove); + + /** + * Add a list of remove operations to the bulk operation. + * + * @param removes the remove operations to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(List removes); + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. The {@link Query} may define a + * {@link Query#with(Sort) sort order} to influence which document to replace when potentially matching + * multiple candidates. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. 
+ */ + default ReactiveBulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. + */ + ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + + /** + * Execute all bulk operations using the default write concern. + * + * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc. + */ + Mono execute(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java index 279f4184fb..4f936e0ffa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -71,7 +71,7 @@ interface TerminatingChangeStream { /** * Start listening to changes. 
The stream will not be completed unless the {@link org.reactivestreams.Subscription} * is {@link org.reactivestreams.Subscription#cancel() canceled}. - *

    + *
    * However, the stream may become dead, or invalid, if all watched collections, databases are dropped. */ Flux> listen(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java index 25d23322a0..afeb6c5e0e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,14 +46,10 @@ class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperat this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class) - */ @Override public ReactiveChangeStream changeStream(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null); } @@ -76,51 +72,35 @@ private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class dom this.options = options; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.String) - */ @Override public ChangeStreamWithFilterAndProjection watchCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return 
new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.Class) - */ @Override public ChangeStreamWithFilterAndProjection watchCollection(Class entityClass) { - Assert.notNull(entityClass, "Collection type not be null!"); + Assert.notNull(entityClass, "Collection type not be null"); return watchCollection(template.getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAt(java.lang.Object) - */ @Override public TerminatingChangeStream resumeAt(Object token) { return withOptions(builder -> { - if (token instanceof Instant) { - builder.resumeAt((Instant) token); - } else if (token instanceof BsonTimestamp) { - builder.resumeAt((BsonTimestamp) token); + if (token instanceof Instant instant) { + builder.resumeAt(instant); + } else if (token instanceof BsonTimestamp bsonTimestamp) { + builder.resumeAt(bsonTimestamp); } }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAfter(java.lang.Object) - */ @Override public TerminatingChangeStream resumeAfter(Object token) { @@ -129,10 +109,6 @@ public TerminatingChangeStream resumeAfter(Object token) { return withOptions(builder -> builder.resumeAfter((BsonValue) token)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#startAfter(java.lang.Object) - */ @Override public TerminatingChangeStream startAfter(Object token) { @@ -141,10 +117,6 @@ public TerminatingChangeStream startAfter(Object token) { return withOptions(builder -> builder.startAfter((BsonValue) token)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithOptions#withOptions(java.util.function.Consumer) - */ @Override public ReactiveChangeStreamSupport withOptions(Consumer optionsConsumer) { @@ -154,31 +126,19 @@ public ReactiveChangeStreamSupport withOptions(Consumer(template, domainType, returnType, collection, builder.build()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithProjection#as(java.lang.Class) - */ @Override public ChangeStreamWithFilterAndProjection as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.aggregation.Aggregation) - */ @Override public ChangeStreamWithFilterAndProjection filter(Aggregation filter) { return withOptions(builder -> builder.filter(filter)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.query.CriteriaDefinition) - */ @Override public ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by) { @@ -188,10 +148,6 @@ public ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by) { return filter(aggregation); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.TerminatingChangeStream#listen() - */ @Override public Flux> listen() { return template.changeStream(collection, options != null ? 
options : ChangeStreamOptions.empty(), returnType); @@ -205,13 +161,14 @@ private ChangeStreamOptionsBuilder initOptionsBuilder() { } options.getFilter().ifPresent(it -> { - if (it instanceof Aggregation) { - builder.filter((Aggregation) it); + if (it instanceof Aggregation aggregation) { + builder.filter(aggregation); } else { builder.filter(((List) it).toArray(new Document[0])); } }); options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup); + options.getFullDocumentBeforeChangeLookup().ifPresent(builder::fullDocumentBeforeChangeLookup); options.getCollation().ifPresent(builder::collation); if (options.isResumeAfter()) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java index a83d1186d0..dda6bf1b96 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java index 1e03b5cdd1..470fd05ef7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index 9a65090922..cba827ffed 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,9 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResult; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; @@ -87,14 +90,27 @@ interface TerminatingFind { */ Flux all(); + /** + * Return a scroll of elements either starting or resuming at {@link ScrollPosition}. + *

    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a scroll of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Mono> scroll(ScrollPosition scrollPosition); + /** * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will * not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link org.reactivestreams.Subscription#cancel() canceled}. - *

    + * {@link org.reactivestreams.Subscription#cancel() canceled}.
    * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the - * document at the "end" of the collection and then the application deletes that document. - *

    + * document at the "end" of the collection and then the application deletes that document.
    * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the * streams will linger and exhaust resources.
    * NOTE: Requires a capped collection. @@ -105,8 +121,7 @@ interface TerminatingFind { Flux tail(); /** - * Get the number of matching elements. - *

    + * Get the number of matching elements.
    * This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java index 9e8dc8d8b2..d1aec8af36 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,9 @@ import org.bson.Document; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.Window; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; @@ -44,14 +47,10 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveFindSupport<>(template, domainType, domainType, null, ALL_QUERY); } @@ -71,8 +70,8 @@ static class ReactiveFindSupport private final String collection; private final Query query; - ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, - String collection, Query query) { + ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, String collection, + Query query) { this.template = template; this.domainType = domainType; @@ -81,46 +80,30 @@ static class ReactiveFindSupport this.query = query; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - 
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection#as(java.lang.Class) - */ @Override public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#first() - */ @Override public Mono first() { @@ -130,10 +113,6 @@ public Mono first() { return result.next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#one() - */ @Override public Mono one() { @@ -148,66 +127,47 @@ public Mono one() { if (it.size() > 1) { return Mono.error( - new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1)); + new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1)); } return Mono.just(it.get(0)); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#all() - */ @Override public Flux all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#tail() - */ + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + } + @Override public Flux tail() { return 
doFind(template.new TailingQueryFindPublisherPreparer(query, domainType)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ @Override public TerminatingFindNear near(NearQuery nearQuery) { return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#count() - */ @Override public Mono count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#exists() - */ @Override public Mono exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindDistinct#distinct(java.lang.String) - */ @Override public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport<>(this, field); } @@ -217,8 +177,8 @@ private Flux doFind(@Nullable FindPublisherPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, - preparer != null ? preparer : getCursorPreparer(query)); + return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject, + fieldsObject, domainType, returnType, preparer != null ? 
preparer : getCursorPreparer(query)); } @SuppressWarnings("unchecked") @@ -255,35 +215,23 @@ public DistinctOperationSupport(ReactiveFindSupport delegate, String field) { this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.as(resultType), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override @SuppressWarnings("unchecked") public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core..ReactiveFindOperation.TerminatingDistinct#all() - */ @Override public Flux all() { return delegate.doFindDistinct(field); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java index 0e6627f2b8..30d61771df 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java index 6af602a01a..ff3b690639 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java index 928c4426a5..06d3c6eae7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -38,14 +38,10 @@ class ReactiveInsertOperationSupport implements ReactiveInsertOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveInsertSupport<>(template, domainType, null); } @@ -63,38 +59,26 @@ static class ReactiveInsertSupport implements ReactiveInsert { this.collection = collection; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#one(java.lang.Object) - */ @Override public Mono one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override public Flux all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override public ReactiveInsert inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveInsertSupport<>(template, domainType, collection); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java index 105e85b005..798b1ca7dd 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java index 3ff586652e..4f0d395950 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,7 +46,7 @@ class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation { @Override public ReactiveMapReduceSupport mapReduce(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); } @@ -100,7 +100,7 @@ public Flux all() { @Override public MapReduceWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -113,7 +113,7 @@ public MapReduceWithProjection inCollection(String collection) { @Override public TerminatingMapReduce matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -126,7 +126,7 @@ public TerminatingMapReduce matching(Query query) { @Override public MapReduceWithQuery as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ReactiveMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, reduceFunction, options); @@ -139,7 +139,7 @@ public MapReduceWithQuery as(Class resultType) { @Override public ReactiveMapReduce with(MapReduceOptions options) { - Assert.notNull(options, "Options must not be null! 
Please consider empty MapReduceOptions#options() instead."); + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -152,7 +152,7 @@ public ReactiveMapReduce with(MapReduceOptions options) { @Override public MapReduceWithReduceFunction map(String mapFunction) { - Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!"); + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -165,7 +165,7 @@ public MapReduceWithReduceFunction map(String mapFunction) { @Override public ReactiveMapReduce reduce(String reduceFunction) { - Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!"); + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java index 8177e2d7e9..89d1cd78ac 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -36,13 +36,11 @@ public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private @Nullable String connectionString; private @Nullable String host; private @Nullable Integer port; private @Nullable MongoClientSettings mongoClientSettings; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** * Configures the host to connect to. @@ -86,7 +84,13 @@ public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientSett * @param exceptionTranslator */ public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; + } + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return exceptionTranslator.translateExceptionIfPossible(ex); } @Override @@ -115,7 +119,7 @@ protected MongoClient createInstance() throws Exception { } throw new IllegalStateException( - "Cannot create MongoClients. 
One of the following is required: mongoClientSettings, connectionString or host/port"); + "Cannot create MongoClients; One of the following is required: mongoClientSettings, connectionString or host/port"); } @Override @@ -123,8 +127,4 @@ protected void destroyInstance(@Nullable MongoClient instance) throws Exception instance.close(); } - @Override - public DataAccessException translateExceptionIfPossible(RuntimeException ex) { - return exceptionTranslator.translateExceptionIfPossible(ex); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java index b91ba4954d..8697ce4dcd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -70,8 +70,8 @@ public static Mono getSession() { */ public static Context setSession(Context context, Publisher session) { - Assert.notNull(context, "Context must not be null!"); - Assert.notNull(session, "Session publisher must not be null!"); + Assert.notNull(context, "Context must not be null"); + Assert.notNull(session, "Session publisher must not be null"); return context.put(SESSION_KEY, Mono.from(session)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java index fb1c260305..90f2d2345d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,10 +25,16 @@ import org.bson.Document; import org.reactivestreams.Publisher; import org.reactivestreams.Subscription; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResult; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; @@ -36,13 +42,13 @@ import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.lang.Nullable; -import org.springframework.transaction.reactive.TransactionalOperator; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -58,8 +64,7 @@ *

    * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using - * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. - *

    + * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
    * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -91,7 +96,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { ReactiveIndexOperations indexOps(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the * MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -120,8 +125,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Mono executeCommand(Document command, @Nullable ReadPreference readPreference); /** - * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. - *

    + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -132,8 +136,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(ReactiveDatabaseCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. - *

    + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -144,8 +147,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(Class entityClass, ReactiveCollectionCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. - *

    + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
    * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -158,8 +160,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} - * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

    + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -170,15 +171,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { */ default ReactiveSessionScoped withSession(Supplier sessionProvider) { - Assert.notNull(sessionProvider, "SessionProvider must not be null!"); + Assert.notNull(sessionProvider, "SessionProvider must not be null"); return withSession(Mono.fromSupplier(sessionProvider)); } /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} - * with given {@literal sessionOptions} to each and every command issued against MongoDB. - *

    + * with given {@literal sessionOptions} to each and every command issued against MongoDB.
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -192,7 +192,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. - *

    + *
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -204,48 +204,14 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide ReactiveSessionScoped withSession(Publisher sessionProvider); /** - * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. - *

    + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
    * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * - * @param session must not be {@literal null}. * @return {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. * @since 2.1 */ ReactiveMongoOperations withSession(ClientSession session); - /** - * Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of - * {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command - * issued against MongoDB. - *

    - * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction - * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are - * {@link ClientSession#abortTransaction() rolled back} upon errors. - * - * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. - * @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}. - */ - @Deprecated - ReactiveSessionScoped inTransaction(); - - /** - * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and - * bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against - * MongoDB. - *

    - * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction - * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are - * {@link ClientSession#abortTransaction() rolled back} upon errors. - * - * @param sessionProvider must not be {@literal null}. - * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. - * @since 2.1 - * @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}. - */ - @Deprecated - ReactiveSessionScoped inTransaction(Publisher sessionProvider); - /** * Create an uncapped collection with a name based on the provided entity class. * @@ -281,6 +247,58 @@ Mono> createCollection(Class entityClass, */ Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. 
+ * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. * @@ -292,8 +310,7 @@ Mono> createCollection(Class entityClass, * Get a {@link MongoCollection} by name. The returned collection may not exists yet (except in local memory) and is * created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) - * exists} first. - *

    + * exists} first.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection. @@ -302,8 +319,7 @@ Mono> createCollection(Class entityClass, Mono> getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

    + * Check to see if a collection with a name indicated by the entity class exists.
    * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -312,8 +328,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

    + * Check to see if a collection with a given name exists.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -322,8 +337,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

    + * Drop the collection with the name indicated by the entity class.
    * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -331,8 +345,7 @@ Mono> createCollection(Class entityClass, Mono dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

    + * Drop the collection with the given name.
    * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -340,11 +353,43 @@ Mono> createCollection(Class entityClass, Mono dropCollection(String collectionName); /** - * Query for a {@link Flux} of objects of type T from the collection used by the entity class. - *

    + * Returns a new {@link ReactiveBulkOperations} for the given collection.
    + * NOTE: Any additional support for field mapping, etc. is not available for {@literal update} or + * {@literal remove} operations in bulk mode due to the lack of domain type information. Use + * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityClass the name of the entity class, must not be {@literal null}. + * @return {@link ReactiveBulkOperations} on the named collection associated of the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityType the name of the entity class. Can be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); + + /** + * Query for a {@link Flux} of objects of type T from the collection used by the entity class.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -354,11 +399,9 @@ Mono> createCollection(Class entityClass, Flux findAll(Class entityClass); /** - * Query for a {@link Flux} of objects of type T from the specified collection. - *

    + * Query for a {@link Flux} of objects of type T from the specified collection.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -370,15 +413,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

    + * specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object. @@ -387,15 +428,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

    + * type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -408,7 +447,7 @@ Mono> createCollection(Class entityClass, * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -417,7 +456,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -426,7 +465,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. Can be {@literal null}. 
* @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -435,14 +474,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type. - *

    + *
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. Must not be {@literal null}. * @return the {@link Flux} of converted objects. @@ -450,15 +488,13 @@ Mono> createCollection(Class entityClass, Flux find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type. - *

    + * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -466,6 +502,57 @@ Mono> createCollection(Class entityClass, */ Flux find(Query query, Class entityClass, String collectionName); + /** + * Query for a scroll of objects of type T from the specified collection.
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -565,11 +652,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation. - *

    + * Execute an aggregation operation.
    * The raw results will be mapped to the given entity class and are returned as stream. The name of the - * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}. - *

    + * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
    * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -583,11 +668,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, Class outputType); /** - * Execute an aggregation operation. - *

    + * Execute an aggregation operation.
    * The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the - * {@code inputType}. - *

    + * {@code inputType}.
    * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -603,10 +686,8 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(Aggregation aggregation, Class inputType, Class outputType); /** - * Execute an aggregation operation. - *

    - * The raw results will be mapped to the given entity class. - *

    + * Execute an aggregation operation.
    + * The raw results will be mapped to the given entity class.
    * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -676,11 +757,14 @@ default Flux findDistinct(Query query, String field, String collection, C Flux> geoNear(NearQuery near, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated. @@ -691,11 +775,14 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. @@ -707,12 +794,15 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. @@ -725,12 +815,15 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. @@ -746,16 +839,18 @@ Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
    * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, T replacement) { @@ -764,14 +859,14 @@ default Mono findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
    * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. @@ -783,18 +878,20 @@ default Mono findAndReplace(Query query, T replacement, String collection /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { @@ -803,13 +900,13 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of @@ -819,19 +916,19 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt */ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the parametrized type. Must not be {@literal null}. @@ -849,13 +946,13 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -865,6 +962,8 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, @@ -876,13 +975,13 @@ default Mono findAndReplace(Query query, S replacement, FindAndReplace /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
    * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -901,14 +1000,12 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

    - * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

    + * database.
    + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object @@ -918,14 +1015,13 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

    + *
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -939,18 +1035,19 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    - * This method uses an + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) - * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees - * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link #estimatedCount(Class)} for empty queries instead. + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ Mono count(Query query, Class entityClass); @@ -961,18 +1058,17 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    - * This method uses an + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) - * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees - * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link #estimatedCount(String)} for empty queries instead. + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #estimatedCount(String) + * @see #exactCount(Query, String) */ Mono count(Query query, String collectionName); @@ -982,42 +1078,41 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to - * count all matches. - *

    - * This method uses an + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) - * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees - * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use - * {@link #estimatedCount(String)} for empty queries instead. + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @see #estimatedCount(String) + * @see #exactCount(Query, Class, String) */ Mono count(Query query, @Nullable Class entityClass, String collectionName); /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, - * based on collection statistics. - *

    + * based on collection statistics.
    * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * * @param entityClass must not be {@literal null}. * @return a {@link Mono} emitting the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 3.1 */ default Mono estimatedCount(Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); return estimatedCount(getCollectionName(entityClass)); } /** - * Estimate the number of documents in the given collection based on collection statistics. - *

    + * Estimate the number of documents in the given collection based on collection statistics.
    * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1028,34 +1123,107 @@ default Mono estimatedCount(Class entityClass) { Mono estimatedCount(String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

    - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

    - * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default Mono exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default Mono exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + Mono exactCount(Query query, @Nullable Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
    + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
    + * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See * Spring's - * Type Conversion" for more details. - *

    + * Type Conversion" for more details.
    * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

    + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono insert(T objectToSave); /** - * Insert the object into the specified collection. - *

    + * Insert the object into the specified collection.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

    + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1067,15 +1235,27 @@ default Mono estimatedCount(Class entityClass) { /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. - * @return the inserted objects . + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1086,24 +1266,31 @@ default Mono estimatedCount(Class entityClass) { /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the list of objects to save. Must not be {@literal null}. * @return the saved objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ Flux insertAll(Collection objectsToSave); /** - * Insert the object into the collection for the entity type of the object to save. - *

    - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

    + * Insert the object into the collection for the entity type of the object to save.
    + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's - * Type Conversion" for more details. - *

    + * Type Conversion" for more details.
    * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted objects. @@ -1112,15 +1299,27 @@ default Mono estimatedCount(Class entityClass) { /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the type. */ Flux insertAll(Mono> batchToSave, Class entityClass); /** * Insert objects into the specified collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1131,6 +1330,11 @@ default Mono estimatedCount(Class entityClass) { /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the publisher which provides objects to save. Must not be {@literal null}. * @return the inserted objects. @@ -1139,94 +1343,116 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

    + * object is not already present, that is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

    + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

    * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

    + * is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave, String collectionName); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

    + * object is not already present, that is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's - * Type Conversion" for more details. + * Spring's Type + * Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

    + * is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * - * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param objectToSave the object to store in the collReactiveMongoOperationsection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by - * combining the query document and the update document.
    + * combining the query document and the update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

    * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. * Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 3.0 * @see Update * @see AggregationUpdate @@ -1240,7 +1466,7 @@ default Mono estimatedCount(Class entityClass) { * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific * support. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. @@ -1255,8 +1481,11 @@ default Mono estimatedCount(Class entityClass) { /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing object. Must not be {@literal null}. @@ -1271,16 +1500,20 @@ default Mono estimatedCount(Class entityClass) { /** * Updates the first object that is found in the collection of the entity class that matches the query document with - * the provided update document.
    - * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. - * Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead. + * the provided update document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 3.0 * @see Update * @see AggregationUpdate @@ -1292,12 +1525,11 @@ default Mono estimatedCount(Class entityClass) { * the provided updated document.
    * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific - * support.
    - * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. - * Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead. + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. @@ -1310,10 +1542,14 @@ default Mono estimatedCount(Class entityClass) { /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
    + * the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. @@ -1328,14 +1564,19 @@ default Mono estimatedCount(Class entityClass) { /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. * @since 3.0 + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @see Update * @see AggregationUpdate */ @@ -1348,7 +1589,7 @@ default Mono estimatedCount(Class entityClass) { * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. @@ -1363,8 +1604,11 @@ default Mono estimatedCount(Class entityClass) { /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate * the existing. Must not be {@literal null}. @@ -1378,10 +1622,13 @@ default Mono estimatedCount(Class entityClass) { Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Object object); @@ -1389,45 +1636,54 @@ default Mono estimatedCount(Class entityClass) { * Removes the given object from the given collection. * * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Object object, String collectionName); /** - * Remove the given object from the collection by id. 
+ * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Mono objectToRemove); /** - * Removes the given object from the given collection. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Mono objectToRemove, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. 
+ * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Mono remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, @Nullable Class entityClass, String collectionName); @@ -1438,8 +1694,9 @@ default Mono estimatedCount(Class entityClass) { * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. Use {@link #remove(Query, Class, String)} to get full type specific support. * - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. 
* @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, String collectionName); @@ -1450,7 +1707,8 @@ default Mono estimatedCount(Class entityClass) { * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, String collectionName); @@ -1461,37 +1719,113 @@ default Mono estimatedCount(Class entityClass) { * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link Flux} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. 
- * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
    + * The collection name is derived from the {@literal replacement} type.
    + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default Mono replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may * + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

    + * {@link Subscription#cancel() canceled}.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @return the {@link Flux} of converted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux tail(Query query, Class entityClass); @@ -1499,15 +1833,13 @@ default Mono estimatedCount(Class entityClass) { * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

    + * {@link Subscription#cancel() canceled}.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. @@ -1519,11 +1851,9 @@ default Mono estimatedCount(Class entityClass) { * Subscribe to a MongoDB Change Stream for all events in * the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

    + * {@link Subscription#cancel() canceled}.
    * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

    + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
    * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1543,11 +1873,9 @@ default Flux> changeStream(ChangeStreamOptions options, * Subscribe to a MongoDB Change Stream for all events in * the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

    + * {@link Subscription#cancel() canceled}.
    * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

    + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
    * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1568,11 +1896,9 @@ default Flux> changeStream(@Nullable String collectionN /** * Subscribe to a MongoDB Change Stream via the reactive * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed - * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

    + * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
    * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

    + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
    * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1602,7 +1928,9 @@ Flux> changeStream(@Nullable String database, @Nullable * @param options additional options like output collection. Must not be {@literal null}. * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options); @@ -1620,7 +1948,9 @@ Flux mapReduce(Query filterQuery, Class domainType, Class resultTyp * @param options additional options like output collection. Must not be {@literal null}. * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options); @@ -1636,6 +1966,7 @@ Flux mapReduce(Query filterQuery, Class domainType, String inputCollec * * @param entityClass must not be {@literal null}. * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. 
* @since 2.1 */ String getCollectionName(Class entityClass); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 614894f3b6..b74ec6aa1c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,18 +32,20 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.BsonValue; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; @@ -57,6 +59,8 @@ import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Metric; @@ -69,6 +73,9 @@ 
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils; import org.springframework.data.mongodb.SessionSynchronization; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; import org.springframework.data.mongodb.core.QueryOperations.CountContext; @@ -76,19 +83,20 @@ import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; import org.springframework.data.mongodb.core.QueryOperations.QueryContext; import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.JsonSchemaMapper; import 
org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; -import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; @@ -96,6 +104,7 @@ import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; import org.springframework.data.mongodb.core.index.ReactiveMongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -105,14 +114,12 @@ import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Meta; -import org.springframework.data.mongodb.core.query.Meta.CursorOption; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; -import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.mongodb.util.BsonUtils; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.Optionals; import 
org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -128,7 +135,16 @@ import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; -import com.mongodb.client.model.*; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateViewOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.EstimatedDocumentCountOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.model.changestream.FullDocument; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.InsertOneResult; @@ -149,22 +165,32 @@ * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a - * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services - * as bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the - * application context, in the first case given to the service directly, in the second case to the prepared template. + * {@link ReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. + *

    + * Note: The {@link ReactiveMongoDatabaseFactory} should always be configured as a bean in the application context, in + * the first case given to the service directly, in the second case to the prepared template. + *

    {@link ReadPreference} and {@link com.mongodb.ReadConcern}

    + *

    + * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

    + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Mark Paluch * @author Christoph Strobl * @author Roman Puchkovskiy * @author Mathieu Ouellet * @author Yadhukrishna S Pai + * @author Florian Lüdiger * @since 2.0 */ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { public static final DbRefResolver NO_OP_REF_RESOLVER = NoOpDbRefResolver.INSTANCE; - private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; private final MongoConverter mongoConverter; @@ -173,12 +199,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; - private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; private final ApplicationListener> indexCreatorListener; private final EntityOperations operations; private final PropertyOperations propertyOperations; private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; @@ -190,6 +215,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + private CountExecution countExecution = this::doExactCount; + /** * Constructor used for a basic template configuration. 
* @@ -233,28 +260,25 @@ public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, @Nullable MongoConverter mongoConverter, Consumer subscriptionExceptionHandler) { - Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null"); this.mongoDatabaseFactory = mongoDatabaseFactory; this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter() : mongoConverter; this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); - this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); this.indexCreatorListener = new IndexCreatorEventListener(subscriptionExceptionHandler); // We always have a mapping context in the converter, whether it's a simple one or not this.mappingContext = this.mongoConverter.getMappingContext(); - this.operations = new EntityOperations(this.mappingContext); - this.propertyOperations = new PropertyOperations(this.mappingContext); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, mongoDatabaseFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We create indexes based on mapping events - if (this.mappingContext instanceof MongoMappingContext) { - - MongoMappingContext mongoMappingContext = (MongoMappingContext) this.mappingContext; + if (this.mappingContext instanceof MongoMappingContext mongoMappingContext) { if (mongoMappingContext.isAutoIndexCreation()) { this.indexCreator = new 
ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps); @@ -274,8 +298,6 @@ private ReactiveMongoTemplate(ReactiveMongoDatabaseFactory dbFactory, ReactiveMo this.mongoConverter = that.mongoConverter; this.queryMapper = that.queryMapper; this.updateMapper = that.updateMapper; - this.schemaMapper = that.schemaMapper; - this.projectionFactory = that.projectionFactory; this.indexCreator = that.indexCreator; this.indexCreatorListener = that.indexCreatorListener; this.mappingContext = that.mappingContext; @@ -283,6 +305,7 @@ private ReactiveMongoTemplate(ReactiveMongoDatabaseFactory dbFactory, ReactiveMo this.propertyOperations = that.propertyOperations; this.sessionSynchronization = that.sessionSynchronization; this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; } private void onCheckForIndexes(MongoPersistentEntity entity, Consumer subscriptionExceptionHandler) { @@ -335,33 +358,39 @@ public void setReadPreference(ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. 
+ * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; + eventDelegate.setPublisher(eventPublisher); if (entityCallbacks == null) { setEntityCallbacks(ReactiveEntityCallbacks.create(applicationContext)); } - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } - - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); } /** * Set the {@link ReactiveEntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the - * {@link ReactiveBeforeSaveCallback}. - *

    + * {@link ReactiveBeforeSaveCallback}.
    * Overrides potentially existing {@link ReactiveEntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -370,10 +399,53 @@ public void setApplicationContext(ApplicationContext applicationContext) throws */ public void setEntityCallbacks(ReactiveEntityCallbacks entityCallbacks) { - Assert.notNull(entityCallbacks, "EntityCallbacks must not be null!"); + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); this.entityCallbacks = entityCallbacks; } + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiFunction> estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionName, filter, options) -> { + + return estimationFilter.apply(filter, options).flatMap(canEstimate -> { + if (!canEstimate) { + return doExactCount(collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionName, estimatedDocumentCountOptions); + }); + }; + } else { + this.countExecution = this::doExactCount; + } + } + /** * Inspects the given {@link ApplicationContext} for {@link ReactiveMongoPersistentEntityIndexCreator} and those in * turn if they were registered for the current {@link MappingContext}. If no creator for the current @@ -395,8 +467,8 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreatorListener); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext) { + configurableApplicationContext.addApplicationListener(indexCreatorListener); } } @@ -405,94 +477,66 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) - */ + @Override public ReactiveIndexOperations indexOps(String collectionName) { return new DefaultReactiveIndexOperations(this, collectionName, this.queryMapper); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) 
- */ + @Override public ReactiveIndexOperations indexOps(Class entityClass) { return new DefaultReactiveIndexOperations(this, getCollectionName(entityClass), this.queryMapper, entityClass); } + @Override public String getCollectionName(Class entityClass) { return operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) - */ + @Override public Mono executeCommand(String jsonCommand) { - Assert.notNull(jsonCommand, "Command must not be empty!"); + Assert.notNull(jsonCommand, "Command must not be empty"); return executeCommand(Document.parse(jsonCommand)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) - */ + @Override public Mono executeCommand(Document command) { return executeCommand(command, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ + @Override public Mono executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference, Document.class) : db.runCommand(command, Document.class)).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ @Override public Flux execute(Class entityClass, ReactiveCollectionCallback action) { return createFlux(getCollectionName(entityClass), action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) - */ @Override public Flux execute(ReactiveDatabaseCallback action) { return createFlux(action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ + @Override public Flux execute(String collectionName, ReactiveCollectionCallback callback) { - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); return createFlux(collectionName, callback); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(org.reactivestreams.Publisher, java.util.function.Consumer) - */ @Override public ReactiveSessionScoped withSession(Publisher sessionProvider) { @@ -525,47 +569,6 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat this.sessionSynchronization = sessionSynchronization; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#inTransaction() - */ - @Override - public ReactiveSessionScoped inTransaction() { - return inTransaction( - mongoDatabaseFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build())); - } - - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#inTransaction(org.reactivestreams.Publisher) - */ - @Override - public ReactiveSessionScoped inTransaction(Publisher sessionProvider) { - - Mono cachedSession = Mono.from(sessionProvider).cache(); - - return new ReactiveSessionScoped() { - - @Override - public Flux execute(ReactiveSessionCallback action, Consumer doFinally) { - - return cachedSession.flatMapMany(session -> { - - if (!session.hasActiveTransaction()) { - session.startTransaction(); - } - - return Flux.usingWhen(Mono.just(session), // - s -> ReactiveMongoTemplate.this.withSession(action, s), // - ClientSession::commitTransaction, // - (sess, err) -> sess.abortTransaction(), // - ClientSession::commitTransaction) // - .doFinally(signalType -> doFinally.accept(session)); - }); - } - }; - } - private Flux withSession(ReactiveSessionCallback action, ClientSession session) { ReactiveSessionBoundMongoTemplate operations = new ReactiveSessionBoundMongoTemplate(session, @@ -575,18 +578,11 @@ private Flux withSession(ReactiveSessionCallback action, ClientSession .contextWrite(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(com.mongodb.session.ClientSession) - */ + @Override public ReactiveMongoOperations withSession(ClientSession session) { return new ReactiveSessionBoundMongoTemplate(session, ReactiveMongoTemplate.this); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(com.mongodb.ClientSessionOptions) - */ @Override public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { return withSession(mongoDatabaseFactory.getSession(sessionOptions)); @@ -601,7 +597,7 @@ public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { */ public Flux createFlux(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, 
"ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); return Mono.defer(this::doGetDatabase).flatMapMany(database -> callback.doInDB(prepareDatabase(database))) .onErrorMap(translateException()); @@ -616,7 +612,7 @@ public Flux createFlux(ReactiveDatabaseCallback callback) { */ public Mono createMono(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); return Mono.defer(this::doGetDatabase).flatMap(database -> Mono.from(callback.doInDB(prepareDatabase(database)))) .onErrorMap(translateException()); @@ -631,8 +627,8 @@ public Mono createMono(ReactiveDatabaseCallback callback) { */ public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); Mono> collectionPublisher = doGetDatabase() .map(database -> getAndPrepareCollection(database, collectionName)); @@ -650,8 +646,8 @@ public Flux createFlux(String collectionName, ReactiveCollectionCallback< */ public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); Mono> collectionPublisher = doGetDatabase() .map(database -> getAndPrepareCollection(database, collectionName)); @@ -660,22 +656,16 @@ public Mono createMono(String collectionName, ReactiveCollectionCallback< 
.onErrorMap(translateException()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) - */ + @Override public Mono> createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); CollectionOptions options = collectionOptions != null ? collectionOptions : CollectionOptions.empty(); options = Optionals @@ -686,65 +676,81 @@ public Mono> createCollection(Class entityClass return doCreateCollection(getCollectionName(entityClass), convertToCreateCollectionOptions(options, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) - */ + @Override public Mono> createCollection(String collectionName) { return doCreateCollection(collectionName, new CreateCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(String collectionName, @Nullable CollectionOptions collectionOptions) { return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) - */ + @Override + public Mono> createView(String name, 
Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); + } + + @Override + public Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private Mono> createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected Mono> doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + return Flux.from(db.createView(name, source, pipeline, viewOptions)) + .then(Mono.fromSupplier(() -> db.getCollection(name))); + }).next(); + } + + @Override public Mono> getCollection(String collectionName) { - Assert.notNull(collectionName, "Collection name must not be null!"); + Assert.notNull(collectionName, "Collection name must not be null"); return createMono(db -> Mono.just(db.getCollection(collectionName))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) - */ + @Override public Mono collectionExists(Class entityClass) { return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) - */ + @Override public Mono collectionExists(String collectionName) { - return createMono(db -> 
Flux.from(db.listCollectionNames()) // + return createMono(db -> Flux.from(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()) // .filter(s -> s.equals(collectionName)) // .map(s -> true) // .single(false)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.Class) - */ + @Override public Mono dropCollection(Class entityClass) { return dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.String) - */ + @Override public Mono dropCollection(String collectionName) { return createMono(collectionName, MongoCollection::drop).doOnSuccess(success -> { @@ -754,12 +760,34 @@ public Mono dropCollection(String collectionName) { }).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollectionNames() - */ + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass) { + return bulkOps(mode, entityClass, getCollectionName(entityClass)); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { + + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + DefaultReactiveBulkOperations operations = new DefaultReactiveBulkOperations(this, collectionName, + new ReactiveBulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, + updateMapper, eventPublisher, entityCallbacks)); + + operations.setDefaultWriteConcern(writeConcern); + + return operations; + } + + @Override public Flux getCollectionNames() { - return createFlux(MongoDatabase::listCollectionNames); + return 
createFlux(db -> MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()); } public Mono getMongoDatabase() { @@ -770,49 +798,34 @@ protected Mono doGetDatabase() { return ReactiveMongoDatabaseUtils.getDatabase(mongoDatabaseFactory, sessionSynchronization); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono findOne(Query query, Class entityClass) { return findOne(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono findOne(Query query, Class entityClass, String collectionName) { if (ObjectUtils.isEmpty(query.getSortObject())) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - new QueryFindPublisherPreparer(query, entityClass)); + return doFindOne(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } query.limit(1); return find(query, entityClass, collectionName).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono exists(Query query, Class entityClass) { return exists(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono exists(Query query, String collectionName) { return exists(query, null, collectionName); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { @@ -821,14 +834,15 @@ public Mono exists(Query query, @Nullable Class entityClass, String return createFlux(collectionName, collection -> { + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); QueryContext queryContext = queryOperations.createQueryContext(query); Document filter = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); - FindPublisher findPublisher = collection.find(filter, Document.class) - .projection(new Document("_id", 1)); + FindPublisher findPublisher = collectionPreparer.prepare(collection).find(filter, Document.class) + .projection(new Document(FieldName.ID.name(), 1)); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("exists: {} in collection: {}", serializeToJsonSafely(filter), collectionName); + LOGGER.debug(String.format("exists: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); } queryContext.applyCollation(entityClass, findPublisher::collation); @@ -837,68 +851,97 @@ public Mono exists(Query query, @Nullable Class entityClass, String }).hasElements(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Flux find(Query query, Class entityClass) { return find(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Flux find(@Nullable Query query, Class entityClass, String collectionName) { if (query == null) { return findAll(entityClass, 
collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - new QueryFindPublisherPreparer(query, entityClass)); + return doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class) - */ + @Override + public Mono> scroll(Query query, Class entityType) { + + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); + } + + @Override + public Mono> scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); + } + + Mono> doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); + + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? 
query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), + keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, + new QueryFindPublisherPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback) + .collectList(); + + return result.map(it -> ScrollUtils.createWindow(query, it, sourceClass, operations)); + } + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), sourceClass, + new QueryFindPublisherPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback) + .collectList(); + + return result.map( + it -> ScrollUtils.createWindow(it, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip()))); + } + + @Override public Mono findById(Object id, Class entityClass) { return findById(id, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class, java.lang.String) - */ + @Override public Mono findById(Object id, Class entityClass, String collectionName) { String idKey = operations.getIdPropertyName(entityClass); - return doFindOne(collectionName, new Document(idKey, id), null, entityClass, (Collation) null); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), null, entityClass, + (Collation) null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class) - */ + @Override public Flux findDistinct(Query query, String field, Class entityClass, Class 
resultClass) { return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ + @Override @SuppressWarnings("unchecked") public Flux findDistinct(Query query, String field, String collectionName, Class entityClass, Class resultClass) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); MongoPersistentEntity entity = getPersistentEntity(entityClass); DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); @@ -906,20 +949,19 @@ public Flux findDistinct(Query query, String field, String collectionName Document mappedQuery = distinctQueryContext.getMappedQuery(entity); String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); Class mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); Flux result = execute(collectionName, collection -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing findDistinct using query {} for field: {} in collection: {}", - serializeToJsonSafely(mappedQuery), field, collectionName); + LOGGER.debug(String.format("Executing findDistinct using query %s 
for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); } FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass); - if (preparer.hasReadPreference()) { - collection = collection.withReadPreference(preparer.getReadPreference()); - } - DistinctPublisher publisher = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + DistinctPublisher publisher = collectionPreparer.prepare(collection).distinct(mappedFieldName, mappedQuery, + mongoDriverCompatibleType); distinctQueryContext.applyCollation(entityClass, publisher::collation); return publisher; }); @@ -935,80 +977,73 @@ public Flux findDistinct(Query query, String field, String collectionName return (Flux) result; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public 
Flux aggregate(Aggregation aggregation, Class inputType, Class outputType) { return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(Aggregation aggregation, String collectionName, Class outputType) { return doAggregate(aggregation, collectionName, null, outputType); } - protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, Class outputType) { + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(outputType, "Output type must not be null!"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); AggregationOptions options = aggregation.getOptions(); - Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!"); + Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming"); AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName); + LOGGER.debug(String.format("Streaming aggregation: %s in collection %s", + serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName)); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, - collection -> 
aggregateAndMap(collection, ctx.getAggregationPipeline(), ctx.isOutOrMerge(), options, - readCallback, - ctx.getInputType())); + return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux aggregateAndMap(MongoCollection collection, List pipeline, - boolean isOutOrMerge, - AggregationOptions options, ReadDocumentCallback readCallback, @Nullable Class inputType) { + boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + @Nullable Class inputType) { + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options); + AggregatePublisher cursor = collectionPreparer.prepare(collection).aggregate(pipeline, Document.class); - AggregatePublisher cursor = collection.aggregate(pipeline, Document.class) - .allowDiskUse(options.isAllowDiskUse()); + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { cursor = cursor.batchSize(options.getCursorBatchSize()); } options.getComment().ifPresent(cursor::comment); - options.getHint().ifPresent(cursor::hint); + + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + cursor = hintFunction.apply(mongoDatabaseFactory, cursor::hintString, cursor::hint); + } Optionals.firstNonEmpty(options::getCollation, () -> operations.forType(inputType).getCollation()) // .map(Collation::toMongoCollation) // @@ -1022,22 +1057,14 @@ private Flux aggregateAndMap(MongoCollection collection, List Flux> geoNear(NearQuery near, Class entityClass) { return geoNear(near, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class, 
java.lang.String) - */ @Override public Flux> geoNear(NearQuery near, Class entityClass, String collectionName) { return geoNear(near, entityClass, collectionName, entityClass); @@ -1048,100 +1075,98 @@ protected Flux> geoNear(NearQuery near, Class entityClass, S Class returnType) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (entityClass == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); } String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(entityClass); String distanceField = operations.nearQueryDistanceFieldName(entityClass); + EntityProjection projection = operations.introspectProjection(returnType, entityClass); GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, - new ProjectingReadCallback<>(mongoConverter, entityClass, returnType, collection), near.getMetric()); + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); + + Builder optionsBuilder = AggregationOptions.builder(); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } + + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } + + optionsBuilder.collation(near.getCollation()); Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField)) - .withOptions(AggregationOptions.builder().collation(near.getCollation()).build()); + .withOptions(optionsBuilder.build()); return aggregate($geoNear, collection, Document.class) // - .concatMap(callback::doWith); + .flatMapSequential(callback::doWith); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) - */ + @Override public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class, java.lang.String) - */ + @Override public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class) - */ + @Override public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass) { return findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class, java.lang.String) - */ + @Override public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName) { - Assert.notNull(options, "Options must not be null! 
"); - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(options, "Options must not be null "); + Assert.notNull(entityClass, "Entity class must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); if (!optionsToUse.getCollation().isPresent()) { operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + return doFindAndModify(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndReplace(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions, java.lang.Class, java.lang.String, java.lang.Class) - */ @Override public Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, String collectionName, Class resultType) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(replacement, "Replacement must not be null!"); - Assert.notNull(options, "Options must not be null! 
Use FindAndReplaceOptions#empty() instead."); - Assert.notNull(entityType, "Entity class must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(resultType, "ResultType must not be null! Use Object.class instead."); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "Entity class must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); - Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none!"); - Assert.isTrue(query.getSkip() <= 0, "Query must not define skip."); + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(resultType, entityType); Document mappedQuery = queryContext.getMappedQuery(entity); - Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, projection); Document mappedSort = queryContext.getMappedSort(entity); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); return Mono.defer(() -> { @@ -1159,8 +1184,9 @@ public Mono findAndReplace(Query query, S replacement, FindAndReplaceO mapped.getCollection())); }).flatMap(it -> { - Mono afterFindAndReplace = doFindAndReplace(it.getCollection(), mappedQuery, mappedFields, mappedSort, - queryContext.getCollation(entityType).orElse(null), entityType, 
it.getTarget(), options, resultType); + Mono afterFindAndReplace = doFindAndReplace(it.getCollection(), collectionPreparer, mappedQuery, + mappedFields, mappedSort, queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), + options, projection); return afterFindAndReplace.flatMap(saved -> { maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection())); return maybeCallAfterSave(saved, it.getTarget(), it.getCollection()); @@ -1169,53 +1195,42 @@ public Mono findAndReplace(Query query, S replacement, FindAndReplaceO }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono findAndRemove(Query query, Class entityClass) { return findAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono findAndRemove(Query query, Class entityClass, String collectionName) { operations.forType(entityClass).getCollation(query); - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null), - entityClass); + return doFindAndRemove(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), + operations.forType(entityClass).getCollation(query).orElse(null), entityClass); } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class) */ + @Override public Mono count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must 
not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); return count(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono count(Query query, String collectionName) { return count(query, null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono count(Query query, @Nullable Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); return createMono(collectionName, collection -> { @@ -1225,22 +1240,14 @@ public Mono count(Query query, @Nullable Class entityClass, String coll Document filter = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing count: {} in collection: {}", serializeToJsonSafely(filter), collectionName); + LOGGER.debug( + String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); } return doCount(collectionName, filter, options); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String) - */ - @Override - public Mono estimatedCount(String collectionName) { - return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); - } - /** * Run the actual count operation against the collection with given name. 
* @@ -1251,13 +1258,54 @@ public Mono estimatedCount(String collectionName) { */ protected Mono doCount(String collectionName, Document filter, CountOptions options) { + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionName, filter, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String) + */ + @Override + public Mono estimatedCount(String collectionName) { + return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); + } + + protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { + return createMono(collectionName, collection -> collection.estimatedDocumentCount(options)); + } + + @Override + public Mono exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(collectionName, mappedQuery, options); + } + + protected Mono doExactCount(String collectionName, Document filter, CountOptions options) { + return createMono(collectionName, collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options)); } - protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { + protected Mono countCanBeEstimated(Document filter, CountOptions options) { - return createMono(collectionName, collection -> collection.estimatedDocumentCount(options)); + if (!filter.isEmpty() || !isEmptyOptions(options)) { + return Mono.just(false); + } + return ReactiveMongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()).map(it -> !it); + } + + 
private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; } /* @@ -1267,53 +1315,39 @@ protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCo @Override public Mono insert(Mono objectToSave) { - Assert.notNull(objectToSave, "Mono to insert must not be null!"); + Assert.notNull(objectToSave, "Mono to insert must not be null"); return objectToSave.flatMap(this::insert); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.Class) - */ @Override public Flux insertAll(Mono> batchToSave, Class entityClass) { return insertAll(batchToSave, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Flux insertAll(Mono> batchToSave, String collectionName) { - Assert.notNull(batchToSave, "Batch to insert must not be null!"); + Assert.notNull(batchToSave, "Batch to insert must not be null"); - return Flux.from(batchToSave).flatMap(collection -> insert(collection, collectionName)); + return Flux.from(batchToSave).flatMapSequential(collection -> insert(collection, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object) - */ + @Override public Mono insert(T objectToSave) { - Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object, java.lang.String) - */ + @Override public Mono insert(T objectToSave, String collectionName) { - 
Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return doInsert(collectionName, objectToSave, this.mongoConverter); } @@ -1347,37 +1381,24 @@ protected Mono doInsert(String collectionName, T objectToSave, MongoWrite }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.Class) - */ + @Override public Flux insert(Collection batchToSave, Class entityClass) { return doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.String) - */ + @Override public Flux insert(Collection batchToSave, String collectionName) { return doInsertBatch(collectionName, batchToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(java.util.Collection) - */ + @Override public Flux insertAll(Collection objectsToSave) { return doInsertAll(objectsToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(reactor.core.publisher.Mono) - */ @Override public Flux insertAll(Mono> objectsToSave) { - return Flux.from(objectsToSave).flatMap(this::insertAll); + return Flux.from(objectsToSave).flatMapSequential(this::insertAll); } protected Flux doInsertAll(Collection listToSave, MongoWriter writer) { @@ -1393,13 +1414,13 @@ protected Flux doInsertAll(Collection listToSave, MongoWrite }); return Flux.fromIterable(elementsByCollection.keySet()) - .flatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); + .concatMap(collectionName -> doInsertBatch(collectionName, 
elementsByCollection.get(collectionName), writer)); } protected Flux doInsertBatch(String collectionName, Collection batchToSave, MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); Mono, Document>>> prepareDocuments = Flux.fromIterable(batchToSave) .flatMap(uninitialized -> { @@ -1413,11 +1434,16 @@ protected Flux doInsertBatch(String collectionName, Collection(initialized, mapped.getDocument(), collectionName)); + return maybeCallBeforeSave(initialized, mapped.getDocument(), collectionName).map(toSave -> { - maybeEmitEvent(new BeforeSaveEvent<>(initialized, dbDoc, collectionName)); + MappedDocument mappedDocument = queryOperations.createInsertContext(mapped) + .prepareId(uninitialized.getClass()); - return maybeCallBeforeSave(initialized, dbDoc, collectionName).thenReturn(Tuples.of(entity, dbDoc)); + return Tuples.of(entity, mappedDocument.getDocument()); + }); }); }).collectList(); @@ -1428,7 +1454,7 @@ protected Flux doInsertBatch(String collectionName, Collection { + return insertDocuments.flatMapSequential(tuple -> { Document document = tuple.getT2(); Object id = MappedDocument.of(document).getId(); @@ -1439,48 +1465,34 @@ protected Flux doInsertBatch(String collectionName, Collection Mono save(Mono objectToSave) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(this::save); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Mono save(Mono objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(o -> save(o, collectionName)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object) - */ + @Override public Mono save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); + Assert.notNull(objectToSave, "Object to save must not be null"); return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object, java.lang.String) - */ + } + + @Override public Mono save(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); @@ -1555,10 +1567,12 @@ protected Mono doSave(String collectionName, T objectToSave, MongoWriter< protected Mono insertDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName); + LOGGER.debug(String + .format("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName)); } - Document document = new Document(dbDoc); + MappedDocument document = MappedDocument.of(dbDoc); + queryOperations.createInsertContext(document).prepareId(entityClass); Flux execute = execute(collectionName, collection -> { @@ -1568,10 +1582,10 @@ protected Mono insertDocument(String collectionName, Document dbDoc, Cla MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - return collectionToUse.insertOne(document); + return collectionToUse.insertOne(document.getDocument()); }); - return Flux.from(execute).last().map(success 
-> MappedDocument.of(document).getId()); + return Flux.from(execute).last().map(success -> document.getId()); } protected Flux insertDocumentList(String collectionName, List dbDocList) { @@ -1581,10 +1595,10 @@ protected Flux insertDocumentList(String collectionName, List documents = new ArrayList<>(); + List documents = new ArrayList<>(dbDocList.size()); return execute(collectionName, collection -> { @@ -1597,7 +1611,7 @@ protected Flux insertDocumentList(String collectionName, List { + }).flatMapSequential(s -> { return Flux.fromStream(documents.stream() // .map(MappedDocument::of) // @@ -1619,7 +1633,7 @@ private MongoCollection prepareCollection(MongoCollection co protected Mono saveDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: " + document.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", document.keySet())); } return createMono(collectionName, collection -> { @@ -1635,12 +1649,13 @@ protected Mono saveDocument(String collectionName, Document document, Cl Publisher publisher; if (!mapped.hasId()) { - publisher = collectionToUse.insertOne(document); + publisher = collectionToUse + .insertOne(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument()); } else { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); - Document filter = updateContext.getMappedQuery(entity); + Document filter = updateContext.getReplacementQuery(); Document replacement = updateContext.getMappedUpdate(entity); Mono deferredFilter; @@ -1667,26 +1682,17 @@ protected Mono saveDocument(String collectionName, Document document, Cl } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, 
org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) - */ + @Override public Mono upsert(Query query, UpdateDefinition update, Class entityClass) { return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.String) - */ + @Override public Mono upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class, java.lang.String) - */ + @Override public Mono upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, true, false); } @@ -1695,47 +1701,33 @@ public Mono upsert(Query query, UpdateDefinition update, Class * (non-Javadoc)) * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) */ + @Override public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass) { return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.String) - */ + @Override public Mono updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, 
update, null, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class, java.lang.String) - */ + @Override public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) - */ + @Override public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass) { return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.String) - */ + @Override public Mono updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class, java.lang.String) - */ + @Override public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, false, true); @@ -1744,12 +1736,6 @@ public Mono updateMulti(Query query, UpdateDefinition update, Clas protected Mono doUpdate(String collectionName, Query query, @Nullable UpdateDefinition update, @Nullable Class entityClass, boolean 
upsert, boolean multi) { - if (query.isSorted() && LOGGER.isWarnEnabled()) { - - LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.", - upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject())); - } - MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); UpdateContext updateContext = multi ? queryOperations.updateContext(update, query, upsert) @@ -1757,7 +1743,7 @@ protected Mono doUpdate(String collectionName, Query query, @Nulla updateContext.increaseVersionForUpdateIfNecessary(entity); Document queryObj = updateContext.getMappedQuery(entity); - UpdateOptions updateOptions = updateContext.getUpdateOptions(entityClass); + UpdateOptions updateOptions = updateContext.getUpdateOptions(entityClass, query); Flux result; @@ -1813,7 +1799,7 @@ protected Mono doUpdate(String collectionName, Query query, @Nulla deferredFilter = Mono.just(filter); } - ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); return deferredFilter.flatMap(it -> Mono.from(collectionToUse.replaceOne(it, updateObj, replaceOptions))); } @@ -1829,8 +1815,7 @@ protected Mono doUpdate(String collectionName, Query query, @Nulla Document updateObj = updateContext.getMappedUpdate(entity); if (containsVersionProperty(queryObj, entity)) - throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: " - + updateObj.toString() + " to collection " + collectionName); + throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity %s to collection %s".formatted(entity.getName(), collectionName)); } } }); @@ -1847,43 +1832,29 @@ private boolean containsVersionProperty(Document document, @Nullable MongoPersis return document.containsKey(persistentEntity.getRequiredVersionProperty().getFieldName()); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono) - */ @Override public Mono remove(Mono objectToRemove) { return objectToRemove.flatMap(this::remove); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Mono remove(Mono objectToRemove, String collectionName) { return objectToRemove.flatMap(it -> remove(it, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object) - */ + @Override public Mono remove(Object object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return remove(operations.forEntity(object).getRemoveByQuery(), object.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object, java.lang.String) - */ + @Override public Mono remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); return doRemove(collectionName, operations.forEntity(object).getRemoveByQuery(), object.getClass()); } @@ -1903,32 +1874,23 @@ private void assertUpdateableIdIfNotSet(Object value) { if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), value.getClass().getName())); } } } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono remove(Query query, String collectionName) { return remove(query, null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono remove(Query query, Class entityClass) { return remove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono remove(Query query, @Nullable Class entityClass, String collectionName) { return doRemove(collectionName, query, entityClass); } @@ -1936,10 +1898,10 @@ public Mono remove(Query query, @Nullable Class entityClass, St protected Mono doRemove(String collectionName, Query query, @Nullable Class entityClass) { if (query == null) { - throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!"); + throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null"); } - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); MongoPersistentEntity entity = getPersistentEntity(entityClass); @@ -1950,16 +1912,18 @@ protected Mono doRemove(String collectionName, Query query, @N MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, null, removeQuery); WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); return execute(collectionName, collection -> { maybeEmitEvent(new 
BeforeDeleteEvent<>(removeQuery, entityClass, collectionName)); - MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + MongoCollection collectionToUse = collectionPreparer + .prepare(prepareCollection(collection, writeConcernToUse)); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(removeQuery), collectionName }); + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); } if (query.getLimit() > 0 || query.getSkip() > 0) { @@ -1983,78 +1947,85 @@ protected Mono doRemove(String collectionName, Query query, @N .next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class) - */ + @Override public Flux findAll(Class entityClass) { return findAll(entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class, java.lang.String) - */ + @Override public Flux findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(null), FindPublisherPreparer.NO_OP_PREPARER, - new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(CollectionPreparer.identity(), null), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public Flux findAllAndRemove(Query query, String collectionName) { return (Flux) findAllAndRemove(query, Object.class, collectionName); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass) { return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override + public Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected Mono replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + return createMono(collectionName, collection -> { + + Document mappedUpdate = updateContext.getMappedUpdate(entity); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedUpdate, updateContext.getQueryObject()); + + MongoCollection collectionToUse = createCollectionPreparer(query, action).prepare(collection); + + return collectionToUse.replaceOne(updateContext.getMappedQuery(entity), mappedUpdate, + 
updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + }); + } + @Override public Flux tail(Query query, Class entityClass) { return tail(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux tail(@Nullable Query query, Class entityClass, String collectionName) { + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); if (query == null) { LOGGER.debug(String.format("Tail for class: %s in collection: %s", entityClass, collectionName)); return executeFindMultiInternal( - collection -> new FindCallback(null).doInCollection(collection).cursorType(CursorType.TailableAwait), + collection -> new FindCallback(collectionPreparer, null).doInCollection(collection) + .cursorType(CursorType.TailableAwait), FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, collectionPreparer, query.getQueryObject(), query.getFieldsObject(), entityClass, new TailingQueryFindPublisherPreparer(query, entityClass)); } @@ -2077,10 +2048,20 @@ public Flux> changeStream(@Nullable String database, @N publisher = filter.isEmpty() ? 
db.watch(Document.class) : db.watch(filter, Document.class); } - publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher); + if (options.isResumeAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter) + .orElse(publisher); + } else if (options.isStartAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::startAfter) + .orElse(publisher); + } publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation) .orElse(publisher); publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher); + + if (options.getFullDocumentBeforeChangeLookup().isPresent()) { + publisher = publisher.fullDocumentBeforeChange(options.getFullDocumentBeforeChangeLookup().get()); + } return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument)); }) // .flatMapMany(publisher -> Flux.from(publisher) @@ -2091,10 +2072,9 @@ List prepareFilter(ChangeStreamOptions options) { Object filter = options.getFilter().orElse(Collections.emptyList()); - if (filter instanceof Aggregation) { - Aggregation agg = (Aggregation) filter; - AggregationOperationContext context = agg instanceof TypedAggregation - ? new TypeBasedAggregationOperationContext(((TypedAggregation) agg).getInputType(), + if (filter instanceof Aggregation agg) { + AggregationOperationContext context = agg instanceof TypedAggregation typedAggregation + ? 
new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), getConverter().getMappingContext(), queryMapper) : new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper); @@ -2110,10 +2090,7 @@ List prepareFilter(ChangeStreamOptions options) { "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#mapReduce(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.Class, java.lang.String, java.lang.String, org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) - */ + @Override public Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options) { @@ -2121,29 +2098,28 @@ public Flux mapReduce(Query filterQuery, Class domainType, Class re options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#mapReduce(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String, java.lang.Class, java.lang.String, java.lang.String, org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) - */ + @Override public Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options) { - Assert.notNull(filterQuery, "Filter query must not be null!"); - Assert.notNull(domainType, "Domain type must not be null!"); - Assert.hasText(inputCollectionName, "Input collection name must not be null or empty!"); - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(mapFunction, "Map function must not be null!"); - Assert.notNull(reduceFunction, "Reduce function must not be null!"); - Assert.notNull(options, "MapReduceOptions must not be null!"); + Assert.notNull(filterQuery, "Filter query must not be null"); + 
Assert.notNull(domainType, "Domain type must not be null"); + Assert.hasText(inputCollectionName, "Input collection name must not be null or empty"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); + Assert.notNull(options, "MapReduceOptions must not be null"); assertLocalFunctionNames(mapFunction, reduceFunction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(filterQuery); return createFlux(inputCollectionName, collection -> { Document mappedQuery = queryMapper.getMappedObject(filterQuery.getQueryObject(), mappingContext.getPersistentEntity(domainType)); - MapReducePublisher publisher = collection.mapReduce(mapFunction, reduceFunction, Document.class); + MapReducePublisher publisher = collectionPreparer.prepare(collection).mapReduce(mapFunction, + reduceFunction, Document.class); publisher.filter(mappedQuery); @@ -2152,15 +2128,16 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu publisher.sort(mappedSort); } - if (filterQuery.getMeta().getMaxTimeMsec() != null) { - publisher.maxTime(filterQuery.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); + Meta meta = filterQuery.getMeta(); + if (meta.hasMaxTime()) { + publisher.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (filterQuery.getLimit() > 0 || (options.getLimit() != null)) { if (filterQuery.getLimit() > 0 && (options.getLimit() != null)) { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a limit. 
Please provide the limit only via one of the two."); + "Both Query and MapReduceOptions define a limit; Please provide the limit only via one of the two."); } if (filterQuery.getLimit() > 0) { @@ -2176,7 +2153,7 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu Optionals.ifAllPresent(filterQuery.getCollation(), options.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); }); if (options.getCollation().isPresent()) { @@ -2200,7 +2177,7 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu } if (options.getOutputSharded().isPresent()) { - publisher = publisher.sharded(options.getOutputSharded().get()); + MongoCompatibilityAdapter.mapReducePublisherAdapter(publisher).sharded(options.getOutputSharded().get()); } if (StringUtils.hasText(options.getOutputCollection()) && !options.usesInlineOutput()) { @@ -2214,7 +2191,7 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); return Flux.from(publisher) - .concatMap(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); + .flatMapSequential(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); }); } @@ -2225,70 +2202,42 @@ private static void assertLocalFunctionNames(String... functions) { if (ResourceUtils.isUrl(function)) { throw new IllegalArgumentException(String.format( - "Blocking accessing to resource %s is not allowed using reactive infrastructure. 
You may load the resource at startup and cache its value.", + "Blocking accessing to resource %s is not allowed using reactive infrastructure; You may load the resource at startup and cache its value.", function)); } } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { return new ReactiveFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { return new ReactiveUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { return new ReactiveRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { return new ReactiveInsertOperationSupport(this).insert(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { return new ReactiveAggregationOperationSupport(this).aggregateAndReturn(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation#mapReduce(java.lang.Class) - */ @Override public ReactiveMapReduce mapReduce(Class domainType) { return new ReactiveMapReduceOperationSupport(this).mapReduce(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class) - */ @Override public ReactiveChangeStream changeStream(Class domainType) { 
return new ReactiveChangeStreamOperationSupport(this).changeStream(domainType); @@ -2310,7 +2259,7 @@ protected Flux doFindAndDelete(String collectionName, Query query, Class< return Flux.from(flux).collectList().filter(it -> !it.isEmpty()) .flatMapMany(list -> Flux.from(remove(operations.getByIdInQuery(list), entityClass, collectionName)) - .flatMap(deleteResult -> Flux.fromIterable(list))); + .flatMapSequential(deleteResult -> Flux.fromIterable(list))); } /** @@ -2327,7 +2276,7 @@ protected Mono> doCreateCollection(String collectionNa // TODO: Emit a collection created event if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", collectionName); + LOGGER.debug(String.format("Created collection [%s]", collectionName)); } }).then(getCollection(collectionName)); @@ -2338,16 +2287,18 @@ protected Mono> doCreateCollection(String collectionNa * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. * @param collation can be {@literal null}. * @return the {@link List} of converted objects. */ - protected Mono doFindOne(String collectionName, Document query, @Nullable Document fields, + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, Class entityClass, @Nullable Collation collation) { - return doFindOne(collectionName, query, fields, entityClass, + return doFindOne(collectionName, collectionPreparer, query, fields, entityClass, findPublisher -> collation != null ? 
findPublisher.collation(collation.toMongoCollation()) : findPublisher); } @@ -2356,6 +2307,7 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. @@ -2363,14 +2315,15 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable * @return the {@link List} of converted objects. * @since 2.2 */ - protected Mono doFindOne(String collectionName, Document query, @Nullable Document fields, + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, Class entityClass, FindPublisherPreparer preparer) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); QueryContext queryContext = queryOperations .createQueryContext(new BasicQuery(query, fields != null ? 
fields : new Document())); - Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { @@ -2378,7 +2331,7 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, preparer), + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } @@ -2387,13 +2340,15 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } @@ -2403,6 +2358,7 @@ protected Flux doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. @@ -2410,19 +2366,20 @@ protected Flux doFind(String collectionName, Document query, Document fie * the result set, (apply limits, skips and so on). * @return the {@link List} of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - FindPublisherPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, FindPublisherPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { + protected Flux doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); - Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { @@ -2430,8 +2387,23 @@ protected Flux doFind(String collectionName, Document query, Document serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + objectCallback, collectionName); + } + + CollectionPreparer> createCollectionPreparer(Query query) { + return ReactiveCollectionPreparerDelegate.of(query); + } + + 
CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createCollectionPreparer(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); } /** @@ -2440,39 +2412,23 @@ protected Flux doFind(String collectionName, Document query, Document * * @since 2.0 */ - Flux doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, FindPublisherPreparer preparer) { + Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class sourceClass, Class targetClass, FindPublisherPreparer preparer) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); - Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory); + Document mappedFields = queryContext.getMappedFields(entity, projection); Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); - } - - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); - } - - private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity entity, - Class targetType) { - - if (entity == null) { - return fields; - } - - Document projectedFields = 
propertyOperations.computeFieldsForProjection(projectionFactory, fields, - entity.getType(), targetType); - - if (ObjectUtils.nullSafeEquals(fields, projectedFields)) { - return queryMapper.getMappedFields(projectedFields, entity); + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName)); } - return queryMapper.getMappedFields(projectedFields, mappingContext.getRequiredPersistentEntity(targetType)); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); } protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) { @@ -2481,72 +2437,39 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, Class entityType) { - - CreateCollectionOptions result = new CreateCollectionOptions(); - - if (collectionOptions == null) { - return result; - } - - collectionOptions.getCapped().ifPresent(result::capped); - collectionOptions.getSize().ifPresent(result::sizeInBytes); - collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); - collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); - - collectionOptions.getValidationOptions().ifPresent(it -> { - - ValidationOptions validationOptions = new ValidationOptions(); - - it.getValidationAction().ifPresent(validationOptions::validationAction); - it.getValidationLevel().ifPresent(validationOptions::validationLevel); - - it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); - - result.validationOptions(validationOptions); - }); - - return result; - } - - private Document 
getMappedValidator(Validator validator, Class domainType) { - - Document validationRules = validator.toDocument(); - - if (validationRules.containsKey("$jsonSchema")) { - return schemaMapper.mapSchema(validationRules, domainType); - } - - return queryMapper.getMappedObject(validationRules, mappingContext.getPersistentEntity(domainType)); + return operations.convertToCreateCollectionOptions(collectionOptions, entityType); } /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

    + * The first document that matches the query is returned and also removed from the collection in the database.
    * The query document is specified as a standard Document and so is the fields specification. * - * @param collectionName name of the collection to retrieve the objects from - * @param query the query document that specifies the criteria used to find a record - * @param collation collation + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param collation collation. * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected Mono doFindAndRemove(String collectionName, Document query, Document fields, Document sort, + protected Mono doFindAndRemove(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName)); + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected Mono doFindAndModify(String collectionName, Document query, Document fields, Document sort, + protected Mono doFindAndModify(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document 
fields, Document sort, Class entityClass, UpdateDefinition update, FindAndModifyOptions options) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); @@ -2562,12 +2485,13 @@ protected Mono doFindAndModify(String collectionName, Document query, Doc if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format( "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s " + "in collection: %s", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } return executeFindOneInternal( - new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); }); @@ -2577,6 +2501,7 @@ protected Mono doFindAndModify(String collectionName, Document query, Doc * Customize this part for findAndReplace. * * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param mappedQuery the query to look up documents. * @param mappedFields the fields to project the result to. * @param mappedSort the sort to be applied when executing the query. @@ -2589,33 +2514,58 @@ protected Mono doFindAndModify(String collectionName, Document query, Doc * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. 
* @since 2.1 */ - protected Mono doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields, + protected Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, FindAndReplaceOptions options, Class resultType) { + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionName, collectionPreparer, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. + * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. 
+ * @since 3.4 + */ + private Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, EntityProjection projection) { + return Mono.defer(() -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} " - + "in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, mappedSort, entityType, - serializeToJsonSafely(replacement), collectionName); + LOGGER.debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityType, + serializeToJsonSafely(replacement), collectionName)); } - return executeFindOneInternal( - new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options), - new ProjectingReadCallback<>(this.mongoConverter, entityType, resultType, collectionName), collectionName); + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, + mappedSort, replacement, collation, options), + new ProjectingReadCallback<>(this.mongoConverter, projection, collectionName), collectionName); }); } protected , T> E maybeEmitEvent(E event) { - - if (eventPublisher != null) { - eventPublisher.publishEvent(event); - } - + eventDelegate.publishEvent(event); return event; } @@ -2665,17 +2615,6 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri } } - /** - * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or - * {@link Iterator}. - * - * @param source can be {@literal null}. - * @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead. 
- */ - protected void ensureNotIterable(@Nullable Object source) { - ensureNotCollectionLike(source); - } - /** * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or * {@link Iterator}. @@ -2697,7 +2636,12 @@ protected void ensureNotCollectionLike(@Nullable Object source) { * @param collection */ protected MongoCollection prepareCollection(MongoCollection collection) { - return this.readPreference != null ? collection.withReadPreference(readPreference) : collection; + + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); + } + + return collection; } /** @@ -2740,7 +2684,7 @@ private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject()instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } @@ -2790,7 +2734,7 @@ private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback { return Flux.from(preparer.initiateFind(collection, collectionCallback::doInCollection)) - .concatMap(objectCallback::doWith); + .flatMapSequential(objectCallback::doWith); }); } @@ -2803,8 +2747,8 @@ private Function translateException() { return throwable -> { - if (throwable instanceof RuntimeException) { - return potentiallyConvertRuntimeException((RuntimeException) throwable, exceptionTranslator); + if (throwable instanceof RuntimeException runtimeException) { + return potentiallyConvertRuntimeException(runtimeException, exceptionTranslator); } return throwable; @@ -2846,13 +2790,24 @@ private MappingMongoConverter getDefaultMongoConverter() { return converter; } + @Nullable private Document getMappedSortObject(Query query, Class type) { if (query == 
null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); + } + + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { + + if (ObjectUtils.isEmpty(sortObject)) { + return null; + } + + return queryMapper.getMappedSort(sortObject, mappingContext.getPersistentEntity(type)); } // Callback implementations @@ -2867,11 +2822,14 @@ private Document getMappedSortObject(Query query, Class type) { */ private static class FindOneCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; private final FindPublisherPreparer preparer; - FindOneCallback(Document query, @Nullable Document fields, FindPublisherPreparer preparer) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, FindPublisherPreparer preparer) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.ofNullable(fields); this.preparer = preparer; @@ -2881,13 +2839,8 @@ private static class FindOneCallback implements ReactiveCollectionCallback doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()); - } - - FindPublisher publisher = preparer.initiateFind(collection, col -> col.find(query, Document.class)); + FindPublisher publisher = preparer.initiateFind(collectionPreparer.prepare(collection), + col -> col.find(query, Document.class)); if (fields.isPresent()) { publisher = publisher.projection(fields.get()); @@ -2905,15 +2858,17 @@ public Publisher doInCollection(MongoCollection collection) */ private 
static class FindCallback implements ReactiveCollectionQueryCallback { + private final CollectionPreparer> collectionPreparer; + private final @Nullable Document query; private final @Nullable Document fields; - FindCallback(@Nullable Document query) { - this(query, null); + FindCallback(CollectionPreparer> collectionPreparer, @Nullable Document query) { + this(collectionPreparer, query, null); } - FindCallback(Document query, Document fields) { - + FindCallback(CollectionPreparer> collectionPreparer, Document query, Document fields) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; } @@ -2921,11 +2876,12 @@ private static class FindCallback implements ReactiveCollectionQueryCallback doInCollection(MongoCollection collection) { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); FindPublisher findPublisher; if (ObjectUtils.isEmpty(query)) { - findPublisher = collection.find(Document.class); + findPublisher = collectionToUse.find(Document.class); } else { - findPublisher = collection.find(query, Document.class); + findPublisher = collectionToUse.find(query, Document.class); } if (ObjectUtils.isEmpty(fields)) { @@ -2944,13 +2900,15 @@ public FindPublisher doInCollection(MongoCollection collecti */ private static class FindAndRemoveCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Optional collation; - FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { - + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -2964,7 +2922,7 @@ public Publisher doInCollection(MongoCollection collection) 
FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); collation.map(Collation::toMongoCollation).ifPresent(findOneAndDeleteOptions::collation); - return collection.findOneAndDelete(query, findOneAndDeleteOptions); + return collectionPreparer.prepare(collection).findOneAndDelete(query, findOneAndDeleteOptions); } } @@ -2973,6 +2931,7 @@ public Publisher doInCollection(MongoCollection collection) */ private static class FindAndModifyCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; @@ -2980,9 +2939,10 @@ private static class FindAndModifyCallback implements ReactiveCollectionCallback private final List arrayFilters; private final FindAndModifyOptions options; - FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List arrayFilters, - FindAndModifyOptions options) { + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -2995,21 +2955,22 @@ private static class FindAndModifyCallback implements ReactiveCollectionCallback public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); if (options.isRemove()) { FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); findOneAndDeleteOptions = options.getCollation().map(Collation::toMongoCollation) .map(findOneAndDeleteOptions::collation).orElse(findOneAndDeleteOptions); - return collection.findOneAndDelete(query, findOneAndDeleteOptions); + return collectionToUse.findOneAndDelete(query, findOneAndDeleteOptions); } 
FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort, arrayFilters); - if (update instanceof Document) { - return collection.findOneAndUpdate(query, (Document) update, findOneAndUpdateOptions); + if (update instanceof Document document) { + return collection.findOneAndUpdate(query, document, findOneAndUpdateOptions); } else if (update instanceof List) { - return collection.findOneAndUpdate(query, (List) update, findOneAndUpdateOptions); + return collectionToUse.findOneAndUpdate(query, (List) update, findOneAndUpdateOptions); } return Flux @@ -3048,6 +3009,7 @@ private static FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndM */ private static class FindAndReplaceCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; @@ -3055,9 +3017,10 @@ private static class FindAndReplaceCallback implements ReactiveCollectionCallbac private final @Nullable com.mongodb.client.model.Collation collation; private final FindAndReplaceOptions options; - FindAndReplaceCallback(Document query, Document fields, Document sort, Document update, - com.mongodb.client.model.Collation collation, FindAndReplaceOptions options) { - + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -3066,16 +3029,12 @@ private static class FindAndReplaceCallback implements ReactiveCollectionCallbac this.options = options; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveCollectionCallback#doInCollection(com.mongodb.reactivestreams.client.MongoCollection) - */ @Override public Publisher 
doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndReplaceOptions findOneAndReplaceOptions = convertToFindOneAndReplaceOptions(options, fields, sort); - return collection.findOneAndReplace(query, update, findOneAndReplaceOptions); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, findOneAndReplaceOptions); } private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplaceOptions options, Document fields, @@ -3132,6 +3091,7 @@ interface MongoDatabaseCallback { */ interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback { + @Override FindPublisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; } @@ -3150,14 +3110,15 @@ class ReadDocumentCallback implements DocumentCallback { ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - Assert.notNull(reader, "EntityReader must not be null!"); - Assert.notNull(type, "Entity type must not be null!"); + Assert.notNull(reader, "EntityReader must not be null"); + Assert.notNull(type, "Entity type must not be null"); this.reader = reader; this.type = type; this.collectionName = collectionName; } + @Override public Mono doWith(Document document) { maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); @@ -3185,37 +3146,30 @@ public Mono doWith(Document document) { */ private class ProjectingReadCallback implements DocumentCallback { - private final EntityReader reader; - private final Class entityType; - private final Class targetType; + private final MongoConverter reader; + private final EntityProjection projection; private final String collectionName; - ProjectingReadCallback(EntityReader reader, Class entityType, Class targetType, - String collectionName) { + ProjectingReadCallback(MongoConverter reader, EntityProjection projection, String collectionName) { this.reader = reader; - this.entityType = entityType; - this.targetType = 
targetType; + this.projection = projection; this.collectionName = collectionName; } + @Override @SuppressWarnings("unchecked") public Mono doWith(Document document) { - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) // - ? entityType // - : targetType; + Class returnType = projection.getMappedType().getType(); + maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName)); - maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName)); - - Object entity = reader.read(typeToRead, document); + Object entity = reader.project(projection, document); if (entity == null) { throw new MappingException(String.format("EntityReader %s returned null", reader)); } - Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity; - - T castEntity = (T) result; + T castEntity = (T) entity; maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); return maybeCallAfterConvert(castEntity, document, collectionName); } @@ -3245,13 +3199,14 @@ static class GeoNearResultDocumentCallback implements DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } + @Override public Mono> doWith(Document object) { double distance = getDistance(object); @@ -3275,14 +3230,28 @@ public Mono> doWith(Document object) { class QueryFindPublisherPreparer implements FindPublisherPreparer { private final Query query; + + private final Document sortObject; + + private final int limit; + + private final long skip; private final @Nullable Class type; QueryFindPublisherPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + + QueryFindPublisherPreparer(Query query, Document sortObject, int limit, long skip, 
@Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } + @Override public FindPublisher prepare(FindPublisher findPublisher) { FindPublisher findPublisherToUse = operations.forType(type) // @@ -3291,46 +3260,40 @@ public FindPublisher prepare(FindPublisher findPublisher) { .map(findPublisher::collation) // .orElse(findPublisher); + HintFunction hintFunction = HintFunction.from(query.getHint()); Meta meta = query.getMeta(); - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !meta.hasValues()) { + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues()) { return findPublisherToUse; } try { - if (query.getSkip() > 0) { - findPublisherToUse = findPublisherToUse.skip((int) query.getSkip()); + if (skip > 0) { + findPublisherToUse = findPublisherToUse.skip((int) skip); } - if (query.getLimit() > 0) { - findPublisherToUse = findPublisherToUse.limit(query.getLimit()); + if (limit > 0) { + findPublisherToUse = findPublisherToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; findPublisherToUse = findPublisherToUse.sort(sort); } - if (StringUtils.hasText(query.getHint())) { - - String hint = query.getHint(); - - if (BsonUtils.isJsonDocument(hint)) { - findPublisherToUse = findPublisherToUse.hint(BsonUtils.parse(hint, mongoDatabaseFactory)); - } else { - findPublisherToUse = findPublisherToUse.hintString(hint); - } + if (hintFunction.isPresent()) { + findPublisherToUse = hintFunction.apply(mongoDatabaseFactory, findPublisherToUse::hintString, + findPublisherToUse::hint); } if (meta.hasValues()) { - if (StringUtils.hasText(meta.getComment())) { - findPublisherToUse = findPublisherToUse.comment(meta.getComment()); + if (meta.hasComment()) { + findPublisherToUse = findPublisherToUse.comment(meta.getRequiredComment()); } - if (meta.getMaxTimeMsec() != null) { - findPublisherToUse = findPublisherToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + if (meta.hasMaxTime()) { + findPublisherToUse = findPublisherToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (meta.getCursorBatchSize() != null) { @@ -3349,11 +3312,6 @@ public FindPublisher prepare(FindPublisher findPublisher) { return findPublisherToUse; } - @Override - public ReadPreference getReadPreference() { - return (query.getMeta().getFlags().contains(CursorOption.SECONDARY_READS) - || query.getMeta().getFlags().contains(CursorOption.SLAVE_OK)) ? ReadPreference.primaryPreferred() : null; - } } class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { @@ -3374,8 +3332,7 @@ private static List toDocuments(Collection + * server through the driver API.
    * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * @@ -3399,10 +3356,6 @@ static class ReactiveSessionBoundMongoTemplate extends ReactiveMongoTemplate { this.session = session; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getCollection(java.lang.String) - */ @Override public Mono> getCollection(String collectionName) { @@ -3410,16 +3363,17 @@ public Mono> getCollection(String collectionName) { return delegate.getCollection(collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getMongoDatabase() - */ @Override public Mono getMongoDatabase() { // native MongoDB objects that offer methods with ClientSession must not be proxied. return delegate.getMongoDatabase(); } + + @Override + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + return Mono.just(false); + } } class IndexCreatorEventListener implements ApplicationListener> { @@ -3440,9 +3394,9 @@ public void onApplicationEvent(MappingContextEvent event) { PersistentEntity entity = event.getPersistentEntity(); // Double check type as Spring infrastructure does not consider nested generics - if (entity instanceof MongoPersistentEntity) { + if (entity instanceof MongoPersistentEntity mongoPersistentProperties) { - onCheckForIndexes((MongoPersistentEntity) entity, subscriptionExceptionHandler); + onCheckForIndexes(mongoPersistentProperties, subscriptionExceptionHandler); } } } @@ -3497,4 +3451,9 @@ String getCollection() { return collection; } } + + @FunctionalInterface + interface CountExecution { + Mono countDocuments(String collection, Document filter, CountOptions options); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java index dbe3386df9..378f13d917 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ interface TerminatingRemove { /** * Remove and return all matching documents.
    - * NOTE The entire list of documents will be fetched before sending the actual delete commands. + * NOTE: The entire list of documents will be fetched before sending the actual delete commands. * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete * operation. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java index 3cb11c0626..97c9cb0d0e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,22 +35,18 @@ class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation { private static final Query ALL_QUERY = new Query(); - private final ReactiveMongoTemplate tempate; + private final ReactiveMongoTemplate template; - ReactiveRemoveOperationSupport(ReactiveMongoTemplate tempate) { - this.tempate = tempate; + ReactiveRemoveOperationSupport(ReactiveMongoTemplate template) { + this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveRemoveSupport<>(tempate, domainType, ALL_QUERY, null); + return new ReactiveRemoveSupport<>(template, domainType, ALL_QUERY, null); } static class ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCollection { @@ -68,34 +64,22 @@ static class ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCo this.collection = collection; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithCollection#inCollection(String) - */ @Override public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#all() - */ @Override public Mono all() { @@ -104,10 +88,6 @@ public Mono all() { return template.doRemove(collectionName, query, domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public Flux findAndRemove() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java index c9b15324fc..aeb0e88e24 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,7 +32,7 @@ public interface ReactiveSessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is * inferred directly into the operation so that no further interaction is necessary. - *

    + *
    * Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java index 17c17edd24..f0ffc1ba60 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,7 +33,7 @@ public interface ReactiveSessionScoped { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

    + *
    * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -47,7 +47,7 @@ default Flux execute(ReactiveSessionCallback action) { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

    + *
    * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java index 60e7a5ba89..51f75f3265 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -72,13 +72,30 @@ interface TerminatingFindAndModify { Mono findAndModify(); } + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + Mono replaceFirst(); + } + /** * Compose findAndReplace execution by calling one of the terminating methods. * * @author Mark Paluch * @since 2.1 */ - interface TerminatingFindAndReplace { + interface TerminatingFindAndReplace extends TerminatingReplace { /** * Find, replace and return the first matching document. @@ -202,6 +219,22 @@ interface FindAndModifyWithOptions { TerminatingFindAndModify withOptions(FindAndModifyOptions options); } + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. 
+ * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + /** * Define {@link FindAndReplaceOptions}. * @@ -209,7 +242,7 @@ interface FindAndModifyWithOptions { * @author Christoph Strobl * @since 2.1 */ - interface FindAndReplaceWithOptions extends TerminatingFindAndReplace { + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { /** * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java index 598f691b1d..51cd99dc93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -42,14 +42,10 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation { this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); } @@ -83,54 +79,34 @@ static class ReactiveUpdateSupport this.targetType = targetType; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition) - */ @Override public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#first() - */ @Override public Mono first() { return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#upsert() - */ @Override public Mono upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndModify#findAndModify() - */ @Override public Mono findAndModify() { @@ -141,10 +117,6 @@ public Mono findAndModify() { collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndReplace#findAndReplace() - */ @Override public Mono findAndReplace() { return template.findAndReplace(query, replacement, @@ -152,80 +124,79 @@ public Mono findAndReplace() { getCollectionName(), targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#all() - */ @Override public Mono all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#replaceWith(java.lang.Object) - */ @Override public FindAndReplaceWithProjection replaceWith(T replacement) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions) - */ @Override public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, options, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class) - */ + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); + } + @Override public FindAndReplaceWithOptions as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, findAndReplaceOptions, replacement, resultType); } + @Override + public Mono replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + 
findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + private Mono doUpdate(boolean multi, boolean upsert) { return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java new file mode 100644 index 0000000000..00c5815fc9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadConcern}. + *

    + * Typically implemented by cursor or query preparer objects. + * + * @author Mark Paluch + * @since 4.1 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadConcernAware { + + /** + * @return {@literal true} if a {@link ReadConcern} is set. + */ + default boolean hasReadConcern() { + return getReadConcern() != null; + } + + /** + * @return the {@link ReadConcern} to apply or {@literal null} if none set. + */ + @Nullable + ReadConcern getReadConcern(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java index 6aefb78cd5..74bca9abea 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,8 @@ * @author Christoph Strobl * @author Mark Paluch * @since 2.2 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions */ public interface ReadPreferenceAware { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java new file mode 100644 index 0000000000..a2e2ba24c0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; + +/** + * Options for {@link org.springframework.data.mongodb.core.MongoOperations#replace(Query, Object) replace operations}. Defaults to + *

    + *
    upsert
    + *
    false
    + *
    + * + * @author Jakub Zurawa + * @author Christoph Strob + * @since 4.2 + */ +public class ReplaceOptions { + + private boolean upsert; + + private static final ReplaceOptions NONE = new ReplaceOptions() { + + private static final String ERROR_MSG = "ReplaceOptions.none() cannot be changed; Please use ReplaceOptions.options() instead"; + + @Override + public ReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + + /** + * Static factory method to create a {@link ReplaceOptions} instance. + *
    + *
    upsert
    + *
    false
    + *
    + * + * @return new instance of {@link ReplaceOptions}. + */ + public static ReplaceOptions replaceOptions() { + return new ReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link ReplaceOptions} instance. + * + * @return unmodifiable {@link ReplaceOptions} instance. + */ + public static ReplaceOptions none() { + return NONE; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + public ReplaceOptions upsert() { + + this.upsert = true; + return this; + } + + /** + * Get the bit indicating if to create a new document if not exists. + * + * @return {@literal true} if set. + */ + public boolean isUpsert() { + return upsert; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index 24ad1c5ffc..a01760368a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2014-2021 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. + * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. 
* * @author Christoph Strobl * @author Oliver Gierke @@ -34,7 +34,7 @@ public interface ScriptOperations { /** - * Store given {@link ExecutableMongoScript} generating a syntheitcal name so that it can be called by it + * Store given {@link ExecutableMongoScript} generating a synthetic name so that it can be called by it * subsequently. * * @param script must not be {@literal null}. @@ -72,10 +72,10 @@ public interface ScriptOperations { Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. */ boolean exists(String scriptName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java new file mode 100644 index 0000000000..85ddce7656 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java @@ -0,0 +1,268 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.IntFunction; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.ScrollPosition.Direction; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Utilities to run scroll queries and create {@link Window} results. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class ScrollUtils { + + /** + * Create the actual query to run keyset-based pagination. Affects projection, sorting, and the criteria. + * + * @param query + * @param idPropertyName + * @return + */ + static KeysetScrollQuery createKeysetPaginationQuery(Query query, String idPropertyName) { + + KeysetScrollPosition keyset = query.getKeyset(); + KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection()); + Document sortObject = director.getSortObject(idPropertyName, query); + Document fieldsObject = director.getFieldsObject(query.getFieldsObject(), sortObject); + Document queryObject = director.createQuery(keyset, query.getQueryObject(), sortObject); + + return new KeysetScrollQuery(queryObject, fieldsObject, sortObject); + } + + static Window createWindow(Query query, List result, Class sourceType, EntityOperations operations) { + + Document sortObject = query.getSortObject(); + KeysetScrollPosition keyset = query.getKeyset(); + Direction direction = keyset.getDirection(); + KeysetScrollDirector director = KeysetScrollDirector.of(direction); + + List resultsToUse = director.postPostProcessResults(result, query.getLimit()); + + IntFunction positionFunction = value -> { + + T last = 
resultsToUse.get(value); + Entity entity = operations.forEntity(last); + + Map keys = entity.extractKeys(sortObject, sourceType); + return ScrollPosition.of(keys, direction); + }; + + return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit())); + } + + static Window createWindow(List result, int limit, IntFunction positionFunction) { + return Window.from(getSubList(result, limit), positionFunction, hasMoreElements(result, limit)); + } + + static boolean hasMoreElements(List result, int limit) { + return !result.isEmpty() && result.size() > limit; + } + + static List getSubList(List result, int limit) { + + if (limit > 0 && result.size() > limit) { + return result.subList(0, limit); + } + + return result; + } + + record KeysetScrollQuery(Document query, Document fields, Document sort) { + + } + + /** + * Director for keyset scrolling. + */ + static class KeysetScrollDirector { + + private static final KeysetScrollDirector FORWARD = new KeysetScrollDirector(); + private static final KeysetScrollDirector REVERSE = new ReverseKeysetScrollDirector(); + + /** + * Factory method to obtain the right {@link KeysetScrollDirector}. + * + * @param direction + * @return + */ + public static KeysetScrollDirector of(ScrollPosition.Direction direction) { + return direction == Direction.FORWARD ? FORWARD : REVERSE; + } + + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = query.isSorted() ? 
query.getSortObject() : new Document(); + sortObject.put(idPropertyName, 1); + + return sortObject; + } + + public Document getFieldsObject(Document fieldsObject, Document sortObject) { + + // make sure we can extract the keyset + if (!fieldsObject.isEmpty()) { + for (String field : sortObject.keySet()) { + fieldsObject.put(field, 1); + } + } + + return fieldsObject; + } + + public Document createQuery(KeysetScrollPosition keyset, Document queryObject, Document sortObject) { + + Map keysetValues = keyset.getKeys(); + List or = (List) queryObject.getOrDefault("$or", new ArrayList<>()); + List sortKeys = new ArrayList<>(sortObject.keySet()); + + // first query doesn't come with a keyset + if (keysetValues.isEmpty()) { + return queryObject; + } + + if (!keysetValues.keySet().containsAll(sortKeys)) { + throw new IllegalStateException("KeysetScrollPosition does not contain all keyset values"); + } + + // build matrix query for keyset paging that contains sort^2 queries + // reflecting a query that follows sort order semantics starting from the last returned keyset + for (int i = 0; i < sortKeys.size(); i++) { + + Document sortConstraint = new Document(); + + for (int j = 0; j < sortKeys.size(); j++) { + + String sortSegment = sortKeys.get(j); + int sortOrder = sortObject.getInteger(sortSegment); + Object o = keysetValues.get(sortSegment); + + if (j >= i) { // tail segment + if (o instanceof BsonNull) { + throw new IllegalStateException( + "Cannot resume from KeysetScrollPosition. Offending key: '%s' is 'null'".formatted(sortSegment)); + } + sortConstraint.put(sortSegment, new Document(getComparator(sortOrder), o)); + break; + } + + sortConstraint.put(sortSegment, o); + } + + if (!sortConstraint.isEmpty()) { + or.add(sortConstraint); + } + } + + if (!or.isEmpty()) { + queryObject.put("$or", or); + } + + return queryObject; + } + + protected String getComparator(int sortOrder) { + return sortOrder == 1 ? 
"$gt" : "$lt"; + } + + protected List postPostProcessResults(List list, int limit) { + return getFirst(limit, list); + } + + } + + /** + * Reverse scrolling director variant applying {@link KeysetScrollPosition.Direction#BACKWARD}. In reverse scrolling, + * we need to flip directions for the actual query so that we do not get everything from the top position and apply + * the limit but rather flip the sort direction, apply the limit and then reverse the result to restore the actual + * sort order. + */ + private static class ReverseKeysetScrollDirector extends KeysetScrollDirector { + + @Override + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = super.getSortObject(idPropertyName, query); + + // flip sort direction for backward scrolling + + for (String field : sortObject.keySet()) { + sortObject.put(field, sortObject.getInteger(field) == 1 ? -1 : 1); + } + + return sortObject; + } + + @Override + public List postPostProcessResults(List list, int limit) { + + // flip direction of the result list as we need to accomodate for the flipped sort order for proper offset + // querying. + Collections.reverse(list); + + return getLast(limit, list); + } + + } + + /** + * Return the first {@code count} items from the list. + * + * @param count + * @param list + * @return + * @param + */ + static List getFirst(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(0, count); + } + + return list; + } + + /** + * Return the last {@code count} items from the list. 
+ * + * @param count + * @param list + * @return + * @param + */ + static List getLast(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(list.size() - count, list.size()); + } + + return list; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java index c12d4b1005..55a87ecadf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,7 +31,7 @@ public interface SessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred * directly into the operation so that no further interaction is necessary. - *

    + *
    * Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java index ead52ee15e..33ad9d7318 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ /** * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}. - *

    + *
    * The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. * * @author Christoph Strobl @@ -34,7 +34,7 @@ public interface SessionScoped { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

    + *
    * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -49,7 +49,7 @@ default T execute(SessionCallback action) { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

    + *
    * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java index df7a4b5a4c..2b51b5e077 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -72,31 +72,19 @@ public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String database * @param mongoInstanceCreated */ SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { - super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator()); + super(mongoClient, databaseName, mongoInstanceCreated, MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public ClientSession getSession(ClientSessionOptions options) { return getMongoClient().startSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient() - */ @Override protected void closeClient() { getMongoClient().close(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String) - */ @Override protected MongoDatabase doGetMongoDatabase(String dbName) { return 
getMongoClient().getDatabase(dbName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java deleted file mode 100644 index 3ce3ce5774..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import com.mongodb.ConnectionString; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import com.mongodb.client.MongoDatabase; - -/** - * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. - * - * @author Christoph Strobl - * @since 2.1 - * @deprecated since 3.0, use {@link SimpleMongoClientDatabaseFactory} instead. - */ -@Deprecated -public class SimpleMongoClientDbFactory extends SimpleMongoClientDatabaseFactory { - - /** - * Creates a new {@link SimpleMongoClientDbFactory} instance for the given {@code connectionString}. - * - * @param connectionString connection coordinates for a database connection. Must contain a database name and must not - * be {@literal null} or empty. 
- * @see MongoDB Connection String reference - */ - public SimpleMongoClientDbFactory(String connectionString) { - this(new ConnectionString(connectionString)); - } - - /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. - * - * @param connectionString connection coordinates for a database connection. Must contain also a database name and not - * be {@literal null}. - */ - public SimpleMongoClientDbFactory(ConnectionString connectionString) { - this(MongoClients.create(connectionString), connectionString.getDatabase(), true); - } - - /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. - */ - public SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName) { - this(mongoClient, databaseName, false); - } - - /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. - * @param mongoInstanceCreated - */ - private SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { - super(mongoClient, databaseName, mongoInstanceCreated); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java index 337109c349..84edf13d57 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -51,8 +51,7 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React private final String databaseName; private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; - + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; private @Nullable WriteConcern writeConcern; /** @@ -77,15 +76,29 @@ public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, String databa private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { - Assert.notNull(client, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); + Assert.notNull(client, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), - "Database name must not contain slashes, dots, spaces, quotes, or dollar signs!"); + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); this.mongo = client; this.databaseName = databaseName; this.mongoInstanceCreated = mongoInstanceCreated; - this.exceptionTranslator = new MongoExceptionTranslator(); + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. 
+ * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; } /** @@ -97,21 +110,15 @@ public void setWriteConcern(WriteConcern writeConcern) { this.writeConcern = writeConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase() - */ + @Override public Mono getMongoDatabase() throws DataAccessException { return getMongoDatabase(databaseName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String) - */ + @Override public Mono getMongoDatabase(String dbName) throws DataAccessException { - Assert.hasText(dbName, "Database name must not be empty."); + Assert.hasText(dbName, "Database name must not be empty"); return Mono.fromSupplier(() -> { @@ -126,6 +133,7 @@ public Mono getMongoDatabase(String dbName) throws DataAccessExce * * @see DisposableBean#destroy() */ + @Override public void destroy() throws Exception { if (mongoInstanceCreated) { @@ -133,36 +141,16 @@ public void destroy() throws Exception { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getExceptionTranslator() - */ - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry() - */ @Override public CodecRegistry getCodecRegistry() { return this.mongo.getDatabase(databaseName).getCodecRegistry(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public Mono getSession(ClientSessionOptions options) { return Mono.from(mongo.startSession(options)); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#withSession(com.mongodb.session.ClientSession) - */ @Override public ReactiveMongoDatabaseFactory withSession(ClientSession session) { return new ClientSessionBoundMongoDbFactory(session, this); @@ -186,64 +174,36 @@ static final class ClientSessionBoundMongoDbFactory implements ReactiveMongoData this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase() - */ @Override public Mono getMongoDatabase() throws DataAccessException { return delegate.getMongoDatabase().map(this::decorateDatabase); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase(java.lang.String) - */ @Override public Mono getMongoDatabase(String dbName) throws DataAccessException { return delegate.getMongoDatabase(dbName).map(this::decorateDatabase); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getExceptionTranslator() - */ @Override public PersistenceExceptionTranslator getExceptionTranslator() { return delegate.getExceptionTranslator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry() - */ @Override public CodecRegistry getCodecRegistry() { return delegate.getCodecRegistry(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public Mono getSession(ClientSessionOptions options) { return delegate.getSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#withSession(com.mongodb.session.ClientSession) - */ @Override public ReactiveMongoDatabaseFactory withSession(ClientSession session) { return delegate.withSession(session); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#isTransactionActive() - */ @Override public boolean isTransactionActive() { return session != null && session.hasActiveTransaction(); @@ -283,7 +243,7 @@ public ReactiveMongoDatabaseFactory getDelegate() { } @Override - public boolean equals(Object o) { + public boolean equals(@Nullable Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java similarity index 60% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java index fd7ea5ab7a..c69fb4ad15 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,17 +15,18 @@ */ package org.springframework.data.mongodb.core; -import com.mongodb.MongoClientSettings; +import org.bson.Document; +import org.springframework.lang.Nullable; /** - * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver. + * {@link CursorPreparer} that exposes its {@link Document sort document}. * - * @author Mark Paluch * @author Christoph Strobl - * @since 2.0 - * @deprecated since 3.0 - Use {@link MongoClientSettingsFactoryBean} instead. 
+ * @since 4.4.3 */ -@Deprecated -public class ReactiveMongoClientSettingsFactoryBean extends MongoClientSettingsFactoryBean { +interface SortingQueryCursorPreparer extends CursorPreparer { + + @Nullable + Document getSortObject(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java new file mode 100644 index 0000000000..e50e1088cb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java @@ -0,0 +1,65 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Optional; + +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; + +/** + * Immutable object holding additional options to be applied when creating a MongoDB + * views. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class ViewOptions { + + private final @Nullable Collation collation; + + static ViewOptions none() { + return new ViewOptions(); + } + + /** + * Creates new instance of {@link ViewOptions}. + */ + public ViewOptions() { + this(null); + } + + private ViewOptions(@Nullable Collation collation) { + this.collation = collation; + } + + /** + * Get the {@link Collation} to be set. 
+ * + * @return {@link Optional#empty()} if not set. + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + + /** + * @param collation the {@link Collation} to use for language-specific string comparison. + * @return new instance of {@link ViewOptions}. + */ + public ViewOptions collation(Collation collation) { + return new ViewOptions(collation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java new file mode 100644 index 0000000000..d6e4119b20 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.WriteConcern; + +/** + * Interface indicating a component that contains and exposes an {@link WriteConcern}. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface WriteConcernAware { + + /** + * @return the {@link WriteConcern} to apply or {@literal null} if none set. + */ + @Nullable + WriteConcern getWriteConcern(); + + /** + * @return {@literal true} if a {@link com.mongodb.WriteConcern} is set. 
+ */ + default boolean hasWriteConcern() { + return getWriteConcern() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java index c7852b60b9..8df4171844 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java index 2f88e8c14c..fbefe4a075 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java @@ -1,5 +1,5 @@ /* - * Copyright 2012-2021 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java index 07fa9023cc..d4cdece411 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2018. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,12 +19,16 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.bson.Document; -import org.springframework.data.mongodb.core.aggregation.Aggregation.SystemVariable; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -44,9 +48,6 @@ protected AbstractAggregationExpression(Object value) { this.value = value; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return toDocument(this.value, context); @@ -68,12 +69,35 @@ protected static List asFields(String... 
fieldRefs) { @SuppressWarnings("unchecked") private Object unpack(Object value, AggregationOperationContext context) { - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (value instanceof Field field) { + return context.getReference(field).toString(); } - if (value instanceof Field) { - return context.getReference((Field) value).toString(); + if (value instanceof Fields fields) { + + List mapped = new ArrayList<>(fields.size()); + + for (Field field : fields) { + mapped.add(unpack(field, context)); + } + + return mapped; + } + + if (value instanceof Sort sort) { + + Document sortDoc = new Document(); + for (Order order : sort) { + + // Check reference + FieldReference reference = context.getReference(order.getProperty()); + sortDoc.put(reference.getRaw(), order.isAscending() ? 1 : -1); + } + return sortDoc; } if (value instanceof List) { @@ -81,7 +105,9 @@ private Object unpack(Object value, AggregationOperationContext context) { List sourceList = (List) value; List mappedList = new ArrayList<>(sourceList.size()); - sourceList.stream().map((item) -> unpack(item, context)).forEach(mappedList::add); + for (Object o : sourceList) { + mappedList.add(unpack(o, context)); + } return mappedList; } @@ -110,8 +136,8 @@ protected List append(Object value, Expand expandList) { List clone = new ArrayList<>((List) this.value); - if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) { - clone.addAll((Collection) value); + if (value instanceof Collection collection && Expand.EXPAND_VALUES.equals(expandList)) { + clone.addAll(collection); } else { clone.add(value); } @@ -133,21 +159,53 @@ protected List append(Object value) { return append(value, Expand.EXPAND_VALUES); } - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({ "unchecked" }) protected Map 
append(String key, Object value) { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); - Map clone = new LinkedHashMap<>((java.util.Map) this.value); + return append((Map) this.value, key, value); + } + + private Map append(Map existing, String key, Object value) { + + Map clone = new LinkedHashMap<>(existing); clone.put(key, value); return clone; + } + + @SuppressWarnings("rawtypes") + protected Map appendTo(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + if (this.value instanceof Map map) { + + Map target = new HashMap<>(map); + if (!target.containsKey(key)) { + target.put(key, value); + return target; + } + target.computeIfPresent(key, (k, v) -> { + + if (v instanceof List list) { + List targetList = new ArrayList<>(list); + targetList.add(value); + return targetList; + } + return Arrays.asList(v, value); + }); + return target; + } + throw new IllegalStateException( + String.format("Cannot append value to %s type", ObjectUtils.nullSafeClassName(this.value))); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected Map remove(String key) { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); Map clone = new LinkedHashMap<>((java.util.Map) this.value); clone.remove(key); @@ -166,7 +224,7 @@ protected Map remove(String key) { @SuppressWarnings({ "unchecked" }) protected Map appendAt(int index, String key, Object value) { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); Map clone = new LinkedHashMap<>(); @@ -226,11 +284,15 @@ protected T get(int index) { @SuppressWarnings("unchecked") protected T get(Object key) { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + 
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); return (T) ((Map) this.value).get(key); } + protected boolean isArgumentMap() { + return this.value instanceof Map; + } + /** * Get the argument map. * @@ -240,7 +302,7 @@ protected T get(Object key) { @SuppressWarnings("unchecked") protected Map argumentMap() { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); return Collections.unmodifiableMap((java.util.Map) value); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 6698b932f8..cf6485c230 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,8 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import org.bson.Document; import org.springframework.util.Assert; @@ -25,6 +28,7 @@ * Gateway to {@literal accumulator} aggregation operations. 
* * @author Christoph Strobl + * @author Julia Lee * @since 1.10 * @soundtrack Rage Against The Machine - Killing In The Name */ @@ -52,6 +56,7 @@ public static AccumulatorOperatorFactory valueOf(AggregationExpression expressio /** * @author Christoph Strobl + * @author Julia Lee */ public static class AccumulatorOperatorFactory { @@ -65,7 +70,7 @@ public static class AccumulatorOperatorFactory { */ public AccumulatorOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -77,7 +82,7 @@ public AccumulatorOperatorFactory(String fieldReference) { */ public AccumulatorOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -112,6 +117,17 @@ public Max max() { return usesFieldRef() ? Max.maxOf(fieldReference) : Max.maxOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of maximum values. + * + * @return new instance of {@link Max}. + * @since 4.0 + */ + public Max max(int numberOfResults) { + return max().limit(numberOfResults); + } + /** * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * minimum value. @@ -122,6 +138,17 @@ public Min min() { return usesFieldRef() ? Min.minOf(fieldReference) : Min.minOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of maximum values. + * + * @return new instance of {@link Max}. 
+ * @since 4.0 + */ + public Min min(int numberOfResults) { + return min().limit(numberOfResults); + } + /** * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the * population standard deviation of the input values. @@ -142,11 +169,142 @@ public StdDevSamp stdDevSamp() { return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates + * the exponential moving average of numeric values + * + * @return new instance of {@link ExpMovingAvg}. + * @since 3.3 + */ + public ExpMovingAvgBuilder expMovingAvg() { + + ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference) + : ExpMovingAvg.expMovingAvgOf(expression); + return new ExpMovingAvgBuilder() { + + @Override + public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) { + return expMovingAvg.n(numberOfHistoricalDocuments); + } + + @Override + public ExpMovingAvg alpha(double exponentialDecayValue) { + return expMovingAvg.alpha(exponentialDecayValue); + } + }; + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * associated numeric value expression. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}. + * @since 4.2 + */ + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? 
Percentile.percentileOf(fieldReference) + : Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the median of the associated numeric value expression. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + public Median median() { + return usesFieldRef() ? Median.medianOf(fieldReference) : Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } } + /** + * Builder for {@link ExpMovingAvg}. + * + * @since 3.3 + */ + public interface ExpMovingAvgBuilder { + + /** + * Define the number of historical documents with significant mathematical weight. + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments); + + /** + * Define the exponential decay value. + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + /** * {@link AggregationExpression} for {@code $sum}. 
* @@ -171,7 +329,7 @@ protected String getMongoMethod() { */ public static Sum sumOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(asFields(fieldReference)); } @@ -183,7 +341,7 @@ public static Sum sumOf(String fieldReference) { */ public static Sum sumOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(Collections.singletonList(expression)); } @@ -196,7 +354,7 @@ public static Sum sumOf(AggregationExpression expression) { */ public Sum and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(append(Fields.field(fieldReference))); } @@ -209,7 +367,7 @@ public Sum and(String fieldReference) { */ public Sum and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(append(expression)); } @@ -223,21 +381,16 @@ public Sum and(AggregationExpression expression) { */ public Sum and(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Sum(append(value)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) 
{ + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -268,7 +421,7 @@ protected String getMongoMethod() { */ public static Avg avgOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(asFields(fieldReference)); } @@ -280,7 +433,7 @@ public static Avg avgOf(String fieldReference) { */ public static Avg avgOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(Collections.singletonList(expression)); } @@ -293,7 +446,7 @@ public static Avg avgOf(AggregationExpression expression) { */ public Avg and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(append(Fields.field(fieldReference))); } @@ -306,21 +459,16 @@ public Avg and(String fieldReference) { */ public Avg and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -340,7 +488,7 @@ private 
Max(Object value) { @Override protected String getMongoMethod() { - return "$max"; + return contains("n") ? "$maxN" : "$max"; } /** @@ -351,8 +499,8 @@ protected String getMongoMethod() { */ public static Max maxOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(Collections.singletonMap("input", Fields.field(fieldReference))); } /** @@ -363,8 +511,8 @@ public static Max maxOf(String fieldReference) { */ public static Max maxOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(Collections.singletonMap("input", expression)); } /** @@ -376,8 +524,8 @@ public static Max maxOf(AggregationExpression expression) { */ public Max and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(appendTo("input", Fields.field(fieldReference))); } /** @@ -389,21 +537,35 @@ public Max and(String fieldReference) { */ public Max and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Max} that returns the given number of maximum values ({@literal $maxN}). 
+ * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Max}. */ + public Max limit(int numberOfResults) { + return new Max(append("n", numberOfResults)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -423,7 +585,7 @@ private Min(Object value) { @Override protected String getMongoMethod() { - return "$min"; + return contains("n") ? "$minN" : "$min"; } /** @@ -434,8 +596,8 @@ protected String getMongoMethod() { */ public static Min minOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Min(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(Collections.singletonMap("input", Fields.field(fieldReference))); } /** @@ -446,8 +608,8 @@ public static Min minOf(String fieldReference) { */ public static Min minOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(Collections.singletonMap("input", expression)); } /** @@ -459,8 +621,8 @@ public static Min minOf(AggregationExpression expression) { */ public Min and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new 
Min(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(appendTo("input", Fields.field(fieldReference))); } /** @@ -472,21 +634,36 @@ public Min and(String fieldReference) { */ public Min and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Min} that returns the given number of minimum values ({@literal $minN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Min}. */ + public Min limit(int numberOfResults) { + return new Min(append("n", numberOfResults)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -517,7 +694,7 @@ protected String getMongoMethod() { */ public static StdDevPop stdDevPopOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new 
StdDevPop(asFields(fieldReference)); } @@ -529,7 +706,7 @@ public static StdDevPop stdDevPopOf(String fieldReference) { */ public static StdDevPop stdDevPopOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(Collections.singletonList(expression)); } @@ -542,7 +719,7 @@ public static StdDevPop stdDevPopOf(AggregationExpression expression) { */ public StdDevPop and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevPop(append(Fields.field(fieldReference))); } @@ -555,21 +732,16 @@ public StdDevPop and(String fieldReference) { */ public StdDevPop and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -600,7 +772,7 @@ protected String getMongoMethod() { */ public static StdDevSamp stdDevSampOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(asFields(fieldReference)); } @@ 
-612,7 +784,7 @@ public static StdDevSamp stdDevSampOf(String fieldReference) { */ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(Collections.singletonList(expression)); } @@ -625,7 +797,7 @@ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { */ public StdDevSamp and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(append(Fields.field(fieldReference))); } @@ -638,24 +810,360 @@ public StdDevSamp and(String fieldReference) { */ public StdDevSamp and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); } } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. 
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. 
+ */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
    + * Specify either {@link #n(int) N} or {@link #alpha(double) aplha}. Not both! + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) { + return new ExpMovingAvg(append("N", numberOfHistoricalDocuments)); + } + + /** + * Define the exponential decay value.
    + * Specify either {@link #alpha(double) aplha} or {@link #n(int) N}. Not both! + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg alpha(double exponentialDecayValue) { + return new ExpMovingAvg(append("alpha", exponentialDecayValue)); + } + + @Override + protected String getMongoMethod() { + return "$expMovingAvg"; + } + } + + /** + * {@link AggregationExpression} for {@code $percentile}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Percentile extends AbstractAggregationExpression { + + private Percentile(Object value) { + super(value); + } + + /** + * Creates new {@link Percentile}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Creates new {@link Percentile}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Define the percentile value(s) that must resolve to percentages in the range {@code 0.0 - 1.0} inclusive. + * + * @param percentages must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile percentages(Double... 
percentages) { + + Assert.notEmpty(percentages, "Percentages must not be null or empty"); + return new Percentile(append("p", Arrays.asList(percentages))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Percentile(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Percentile(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$percentile"; + } + } + + /** + * {@link AggregationExpression} for {@code $median}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Median extends AbstractAggregationExpression { + + private Median(Object value) { + super(value); + } + + /** + * Creates new {@link Median}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Median(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Median(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$median"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java index 3f3dd125d1..b79d978b8b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,6 +31,7 @@ * * * @author Christoph Strobl + * @author Kim Sumin * @since 3.0 * @see MongoDB Aggregation * Framework: $addFields @@ -99,10 +100,6 @@ public AddFieldsOperationBuilder and() { return new AddFieldsOperationBuilder(getValueMap()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.DocumentEnhancingOperation#mongoOperator() - */ @Override protected String mongoOperator() { return "$addFields"; @@ -152,7 +149,7 @@ public AddFieldsOperationBuilder withValue(Object value) { @Override public AddFieldsOperationBuilder withValueOf(Object value) { - valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value); + valueMap.put(field, value instanceof String stringValue ? 
Fields.field(stringValue) : value); return AddFieldsOperationBuilder.this; } @@ -201,4 +198,5 @@ public interface ValueAppender { AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index cecc8f2554..45de38ed21 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,20 +21,22 @@ import java.util.List; import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder; import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder; import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder; import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder; +import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder; import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder; +import 
org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -50,6 +52,7 @@ * @author Nikolay Bogdanov * @author Gustavo de Geus * @author Jérôme Guyon + * @author Sangyong Choi * @since 1.3 */ public class Aggregation { @@ -138,7 +141,7 @@ public static AggregationUpdate newUpdate(AggregationOperation... operations) { */ public Aggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); + Assert.notNull(options, "AggregationOptions must not be null"); return new Aggregation(this.pipeline.getOperations(), options); } @@ -177,7 +180,7 @@ protected Aggregation(AggregationOperation... aggregationOperations) { */ protected static List asAggregationList(AggregationOperation... 
aggregationOperations) { - Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty!"); + Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty"); return Arrays.asList(aggregationOperations); } @@ -199,8 +202,8 @@ protected Aggregation(List aggregationOperations) { */ protected Aggregation(List aggregationOperations, AggregationOptions options) { - Assert.notNull(aggregationOperations, "AggregationOperations must not be null!"); - Assert.notNull(options, "AggregationOptions must not be null!"); + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + Assert.notNull(options, "AggregationOptions must not be null"); this.pipeline = new AggregationPipeline(aggregationOperations); this.options = options; @@ -222,12 +225,11 @@ public AggregationOptions getOptions() { * @return */ public static String previousOperation() { - return "_id"; + return FieldName.ID.name(); } /** - * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}. - *

    + * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
    * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is * an alias for {@code $addFields}. * @@ -239,6 +241,40 @@ public static AddFieldsOperationBuilder addFields() { return AddFieldsOperation.builder(); } + /** + * Creates a new {@link AggregationOperation} taking the given {@link Bson bson value} as is.
    + * + *

    +	 * Aggregation.stage(Aggregates.search(exists(fieldPath("..."))));
    +	 * 
    + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param aggregationOperation the must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(Bson aggregationOperation) { + return new BasicAggregationOperation(aggregationOperation); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link String json value} as is.
    + * + *
    +	 * Aggregation.stage("{ $search : { near : { path : 'released' , origin : ... } } }");
    +	 * 
    + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param json the JSON representation of the pipeline stage. Must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(String json) { + return new BasicAggregationOperation(json); + } + /** * Creates a new {@link ProjectionOperation} including the given fields. * @@ -268,7 +304,7 @@ public static ProjectionOperation project(Fields fields) { */ public static ProjectionOperation project(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); return new ProjectionOperation(type); } @@ -345,9 +381,9 @@ public static UnwindOperation unwind(String field, String arrayIndex) { } /** - * Factory method to create a new {@link UnwindOperation} for the field with the given nameincluding the name of a new - * field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. Note - * that extended unwind is supported in MongoDB version 3.2+. + * Factory method to create a new {@link UnwindOperation} for the field with the given name, including the name of a + * new field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. + * Note that extended unwind is supported in MongoDB version 3.2+. * * @param field must not be {@literal null} or empty. * @param arrayIndex must not be {@literal null} or empty. @@ -392,6 +428,20 @@ public static StartWithBuilder graphLookup(String fromCollection) { return GraphLookupOperation.builder().from(fromCollection); } + /** + * Creates a new {@link VectorSearchOperation} by starting from the {@code indexName} to use. + * + * @param indexName must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. 
+ * @since 4.5 + */ + public static VectorSearchOperation.PathContributor vectorSearch(String indexName) { + + Assert.hasText(indexName, "Index name must not be null or empty"); + + return VectorSearchOperation.search(indexName); + } + /** * Factory method to create a new {@link SortOperation} for the given {@link Sort}. * @@ -435,18 +485,6 @@ public static SortByCountOperation sortByCount(AggregationExpression groupAndSor return new SortByCountOperation(groupAndSortExpression); } - /** - * Creates a new {@link SkipOperation} skipping the given number of elements. - * - * @param elementsToSkip must not be less than zero. - * @return new instance of {@link SkipOperation}. - * @deprecated prepare to get this one removed in favor of {@link #skip(long)}. - */ - @Deprecated - public static SkipOperation skip(int elementsToSkip) { - return new SkipOperation(elementsToSkip); - } - /** * Creates a new {@link SkipOperation} skipping the given number of elements. * @@ -499,6 +537,17 @@ public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } + /** + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MatchOperation}. + * @since 3.3 + */ + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); + } + /** * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The * {@code distanceField} defines output field that contains the calculated distance. @@ -632,6 +681,23 @@ public static LookupOperation lookup(Field from, Field localField, Field foreign return new LookupOperation(from, localField, foreignField, as); } + /** + * Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API. + * + *
    +	 * Aggregation.lookup().from("restaurants").localField("restaurant_name").foreignField("name")
    +	 * 		.let(newVariable("orders_drink").forField("drink"))
    +	 * 		.pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
    +	 * 		.as("matches")
    +	 * 
    + * + * @return new instance of {@link LookupOperationBuilder}. + * @since 4.1 + */ + public static LookupOperationBuilder lookup() { + return new LookupOperationBuilder(); + } + /** * Creates a new {@link CountOperationBuilder}. * @@ -714,8 +780,7 @@ public AggregationPipeline getPipeline() { } /** - * Converts this {@link Aggregation} specification to a {@link Document}. - *

    + * Converts this {@link Aggregation} specification to a {@link Document}.
    * MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render * an aggregation pipeline. * @@ -730,60 +795,8 @@ public Document toDocument(String inputCollectionName, AggregationOperationConte return options.applyAndReturnPotentiallyChangedCommand(command); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(toDocument("__collection__", DEFAULT_CONTEXT)); } - - /** - * Describes the system variables available in MongoDB aggregation framework pipeline expressions. - * - * @author Thomas Darimont - * @author Christoph Strobl - * @see Aggregation Variables. - */ - enum SystemVariable { - - ROOT, CURRENT, REMOVE; - - private static final String PREFIX = "$$"; - - /** - * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} - * otherwise. - * - * @param fieldRef may be {@literal null}. - * @return {@literal true} if the given field refers to a {@link SystemVariable}. - */ - public static boolean isReferingToSystemVariable(@Nullable String fieldRef) { - - if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { - return false; - } - - int indexOfFirstDot = fieldRef.indexOf('.'); - String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? 
fieldRef.length() : indexOfFirstDot); - - for (SystemVariable value : values()) { - if (value.name().equals(candidate)) { - return true; - } - } - - return false; - } - - /* - * (non-Javadoc) - * @see java.lang.Enum#toString() - */ - @Override - public String toString() { - return PREFIX.concat(name()); - } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java index b2fee44394..1cb38ef362 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,10 @@ /** * An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like * {@code project} and {@code group}. + *

    + * The {@link AggregationExpression expressions} {@link #toDocument(AggregationOperationContext)} method is called during + * the mapping process to obtain the mapped, ready to use representation that can be handed over to the driver as part + * of an {@link AggregationOperation pipleine stage}. * * @author Thomas Darimont * @author Oliver Gierke @@ -39,11 +43,11 @@ public interface AggregationExpression extends MongoExpression { */ static AggregationExpression from(MongoExpression expression) { - if (expression instanceof AggregationExpression) { - return AggregationExpression.class.cast(expression); + if (expression instanceof AggregationExpression aggregationExpression) { + return aggregationExpression; } - return (context) -> context.getMappedObject(expression.toDocument()); + return context -> context.getMappedObject(expression.toDocument()); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java new file mode 100644 index 0000000000..1ae935a92b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * A {@link CriteriaDefinition criteria} to use {@code $expr} within a + * {@link org.springframework.data.mongodb.core.query.Query}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class AggregationExpressionCriteria implements CriteriaDefinition { + + private final AggregationExpression expression; + + AggregationExpressionCriteria(AggregationExpression expression) { + this.expression = expression; + } + + /** + * @param expression must not be {@literal null}. + * @return new instance of {@link AggregationExpressionCriteria}. + */ + public static AggregationExpressionCriteria whereExpr(AggregationExpression expression) { + return new AggregationExpressionCriteria(expression); + } + + @Override + public Document getCriteriaObject() { + + if (expression instanceof Expr expr) { + return new Document(getKey(), expr.get(0)); + } + return new Document(getKey(), expression); + } + + @Override + public String getKey() { + return "$expr"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java index 3c659f166f..00db38329f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -60,7 +60,7 @@ public AggregationExpressionTransformationContext(T currentNode, @Nullable Expre super(currentNode, parentNode, previousOperationObject); - Assert.notNull(context, "AggregationOperationContext must not be null!"); + Assert.notNull(context, "AggregationOperationContext must not be null"); this.aggregationContext = context; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java deleted file mode 100644 index 9e26c3e6f6..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2015-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.aggregation; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.bson.Document; -import org.springframework.util.Assert; - -/** - * An enum of supported {@link AggregationExpression}s in aggregation pipeline stages. 
- * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Christoph Strobl - * @author Mark Paluch - * @since 1.7 - * @deprecated since 1.10. Please use {@link ArithmeticOperators} and {@link ComparisonOperators} instead. - */ -@Deprecated -public enum AggregationFunctionExpressions { - - SIZE, CMP, EQ, GT, GTE, LT, LTE, NE, SUBTRACT, ADD, MULTIPLY; - - /** - * Returns an {@link AggregationExpression} build from the current {@link Enum} name and the given parameters. - * - * @param parameters must not be {@literal null} - * @return new instance of {@link AggregationExpression}. - */ - public AggregationExpression of(Object... parameters) { - - Assert.notNull(parameters, "Parameters must not be null!"); - return new FunctionExpression(name().toLowerCase(), parameters); - } - - /** - * An {@link AggregationExpression} representing a function call. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.7 - */ - static class FunctionExpression implements AggregationExpression { - - private final String name; - private final List values; - - /** - * Creates a new {@link FunctionExpression} for the given name and values. - * - * @param name must not be {@literal null} or empty. - * @param values must not be {@literal null}. 
- */ - public FunctionExpression(String name, Object[] values) { - - Assert.hasText(name, "Name must not be null!"); - Assert.notNull(values, "Values must not be null!"); - - this.name = name; - this.values = Arrays.asList(values); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Expression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ - @Override - public Document toDocument(AggregationOperationContext context) { - - List args = new ArrayList(values.size()); - - for (Object value : values) { - args.add(unpack(value, context)); - } - - return new Document("$" + name, args); - } - - private static Object unpack(Object value, AggregationOperationContext context) { - - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } - - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } - - return value; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java index 54a3786ba2..923a1e73cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java index d033aba3de..a49c7e46d5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,20 +20,25 @@ import java.util.Arrays; import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.springframework.beans.BeanUtils; +import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ReflectionUtils; +import com.mongodb.MongoClientSettings; + /** * The context for an {@link AggregationOperation}. * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 */ -public interface AggregationOperationContext { +public interface AggregationOperationContext extends CodecRegistryProvider { /** * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata etc. 
@@ -86,7 +91,7 @@ default Document getMappedObject(Document document) { */ default Fields getFields(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) // .filter(it -> { // object and default methods @@ -103,15 +108,53 @@ default Fields getFields(Class type) { .toArray(String[]::new)); } + /** + * Create a nested {@link AggregationOperationContext} from this context that exposes {@link ExposedFields fields}. + *

    + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that inherits exposed fields from this + * context and exposes {@link ExposedFields fields}. + *

    + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + /** * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for - * its existence. Typically the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that * are not present in one of the previous stages or the input source, throughout the pipeline. * * @return a more relaxed {@link AggregationOperationContext}. * @since 3.0 + * @deprecated since 4.3.1, {@link FieldLookupPolicy} should be specified explicitly when creating the + * AggregationOperationContext. 
*/ + @Deprecated(since = "4.3.1", forRemoval = true) default AggregationOperationContext continueOnMissingFieldReference() { return this; } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java index cc7ffbdc55..fd5f7ed979 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -55,23 +55,56 @@ static List toDocument(List operations, Aggregat operationDocuments.addAll(operation.toPipelineStages(contextToUse)); - if (operation instanceof FieldsExposingAggregationOperation) { + if (operation instanceof FieldsExposingAggregationOperation exposedFieldsOperation) { - FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; ExposedFields fields = exposedFieldsOperation.getFields(); if (operation instanceof InheritsFieldsAggregationOperation || exposedFieldsOperation.inheritsFields()) { - contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse); + contextToUse = contextToUse.inheritAndExpose(fields); } else { - contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT - : new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse); + contextToUse = fields.exposesNoFields() ? 
ConverterAwareNoOpContext.instance(rootContext) + : contextToUse.expose(fields); } } + } return operationDocuments; } + private static class ConverterAwareNoOpContext implements AggregationOperationContext { + + AggregationOperationContext ctx; + + static ConverterAwareNoOpContext instance(AggregationOperationContext ctx) { + + if(ctx instanceof ConverterAwareNoOpContext noOpContext) { + return noOpContext; + } + + return new ConverterAwareNoOpContext(ctx); + } + + ConverterAwareNoOpContext(AggregationOperationContext ctx) { + this.ctx = ctx; + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return ctx.getMappedObject(document, null); + } + + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } + /** * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. 
* @@ -80,28 +113,16 @@ static List toDocument(List operations, Aggregat */ private static class NoOpAggregationOperationContext implements AggregationOperationContext { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class) - */ @Override public Document getMappedObject(Document document, @Nullable Class type) { return document; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ @Override public FieldReference getReference(Field field) { return new DirectFieldReference(new ExposedField(field, true)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java index e1afc84945..327d40b8c7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java @@ -1,5 +1,5 @@ /* - * Copyright 2014-2021 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,25 +19,36 @@ import java.util.Optional; import org.bson.Document; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; + /** * Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support - * aggregation options can be found in the MongoDB reference documentation - * https://docs.mongodb.org/manual/reference/command/aggregate/#aggregate + * aggregation options can be found in the + * MongoDB reference documentation. + *

    + * As off 4.3 {@link #allowDiskUse} can be {@literal null}, indicating use of server default, and may only be applied if + * {@link #isAllowDiskUseSet() explicitly set}. For compatibility reasons {@link #isAllowDiskUse()} will remain + * returning {@literal false} if the no value has been set. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch * @author Yadhukrishna S Pai + * @author Soumya Prakash Behera * @see Aggregation#withOptions(AggregationOptions) * @see TypedAggregation#withOptions(AggregationOptions) * @since 1.6 */ -public class AggregationOptions { +public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware { private static final String BATCH_SIZE = "batchSize"; private static final String CURSOR = "cursor"; @@ -48,12 +59,16 @@ public class AggregationOptions { private static final String MAX_TIME = "maxTimeMS"; private static final String HINT = "hint"; - private final boolean allowDiskUse; + private final Optional allowDiskUse; private final boolean explain; private final Optional cursor; private final Optional collation; private final Optional comment; - private final Optional hint; + private final Optional hint; + + private Optional readConcern; + + private Optional readPreference; private Duration maxTime = Duration.ZERO; private ResultOptions resultOptions = ResultOptions.READ; private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED; @@ -65,7 +80,7 @@ public class AggregationOptions { * @param explain whether to get the execution plan for the aggregation instead of the actual results. * @param cursor can be {@literal null}, used to pass additional options to the aggregation. 
*/ - public AggregationOptions(boolean allowDiskUse, boolean explain, Document cursor) { + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor) { this(allowDiskUse, explain, cursor, null); } @@ -112,15 +127,17 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Docum * @param hint can be {@literal null}, used to provide an index that would be forcibly used by query optimizer. * @since 3.1 */ - private AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, - @Nullable Collation collation, @Nullable String comment, @Nullable Document hint) { + private AggregationOptions(@Nullable Boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment, @Nullable Object hint) { - this.allowDiskUse = allowDiskUse; + this.allowDiskUse = Optional.ofNullable(allowDiskUse); this.explain = explain; this.cursor = Optional.ofNullable(cursor); this.collation = Optional.ofNullable(collation); this.comment = Optional.ofNullable(comment); this.hint = Optional.ofNullable(hint); + this.readConcern = Optional.empty(); + this.readPreference = Optional.empty(); } /** @@ -144,9 +161,9 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatch */ public static AggregationOptions fromDocument(Document document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); - boolean allowDiskUse = document.getBoolean(ALLOW_DISK_USE, false); + Boolean allowDiskUse = document.get(ALLOW_DISK_USE, Boolean.class); boolean explain = document.getBoolean(EXPLAIN, false); Document cursor = document.get(CURSOR, Document.class); Collation collation = document.containsKey(COLLATION) ? Collation.from(document.get(COLLATION, Document.class)) @@ -172,13 +189,23 @@ public static Builder builder() { } /** - * Enables writing to temporary files. 
When set to true, aggregation stages can write data to the _tmp subdirectory in - the dbPath directory. + * Enables writing to temporary files. When set to {@literal true}, aggregation stages can write data to the + * {@code _tmp} subdirectory in the {@code dbPath} directory. * - * @return {@literal true} if enabled. + * @return {@literal true} if enabled; {@literal false} otherwise (or if not set). */ public boolean isAllowDiskUse() { - return allowDiskUse; + return allowDiskUse.orElse(false); + } + + /** + * Return whether {@link #isAllowDiskUse} is configured. + * + * @return {@literal true} if {@code allowDiskUse} is configured, {@literal false} otherwise. + * @since 4.2.5 + */ + public boolean isAllowDiskUseSet() { + return allowDiskUse.isPresent(); } /** @@ -236,15 +263,56 @@ public Optional getComment() { } /** - * Get the hint used to to fulfill the aggregation. + * Get the hint used to fulfill the aggregation. * * @return never {@literal null}. * @since 3.1 + * @deprecated since 4.1, use {@link #getHintObject()} instead. */ public Optional getHint() { + return hint.map(it -> { + if (it instanceof Document doc) { + return doc; + } + if (it instanceof String hintString) { + if (BsonUtils.isJsonDocument(hintString)) { + return BsonUtils.parse(hintString, null); + } + } + throw new IllegalStateException("Unable to read hint of type %s".formatted(it.getClass())); + }); + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 4.1 + */ + public Optional getHintObject() { return hint; } + @Override + public boolean hasReadConcern() { + return readConcern.isPresent(); + } + + @Override + public ReadConcern getReadConcern() { + return readConcern.orElse(null); + } + + @Override + public boolean hasReadPreference() { + return readPreference.isPresent(); + } + + @Override + public ReadPreference getReadPreference() { + return readPreference.orElse(null); + } + /** * @return the time limit for processing. 
{@link Duration#ZERO} is used for the default unbounded behavior. * @since 3.0 @@ -281,8 +349,8 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { Document result = new Document(command); - if (allowDiskUse && !result.containsKey(ALLOW_DISK_USE)) { - result.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet() && !result.containsKey(ALLOW_DISK_USE)) { + result.put(ALLOW_DISK_USE, isAllowDiskUse()); } if (explain && !result.containsKey(EXPLAIN)) { @@ -316,7 +384,9 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { public Document toDocument() { Document document = new Document(); - document.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet()) { + document.put(ALLOW_DISK_USE, isAllowDiskUse()); + } document.put(EXPLAIN, explain); cursor.ifPresent(val -> document.put(CURSOR, val)); @@ -339,9 +409,6 @@ public boolean hasExecutionTimeLimit() { return !maxTime.isZero() && !maxTime.isNegative(); } - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return toDocument().toJson(); @@ -359,12 +426,14 @@ static Document createCursor(int cursorBatchSize) { */ public static class Builder { - private boolean allowDiskUse; + private Boolean allowDiskUse; private boolean explain; private @Nullable Document cursor; private @Nullable Collation collation; private @Nullable String comment; - private @Nullable Document hint; + private @Nullable Object hint; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; private @Nullable Duration maxTime; private @Nullable ResultOptions resultOptions; private @Nullable DomainTypeMapping domainTypeMapping; @@ -457,6 +526,45 @@ public Builder hint(@Nullable Document hint) { return this; } + /** + * Define a hint that is used by query optimizer to fulfill the aggregation. + * + * @param indexName can be {@literal null}. + * @return this. 
+ * @since 4.1 + */ + public Builder hint(@Nullable String indexName) { + + this.hint = indexName; + return this; + } + + /** + * Define a {@link ReadConcern} to apply to the aggregation. + * + * @param readConcern can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder readConcern(@Nullable ReadConcern readConcern) { + + this.readConcern = readConcern; + return this; + } + + /** + * Define a {@link ReadPreference} to apply to the aggregation. + * + * @param readPreference can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder readPreference(@Nullable ReadPreference readPreference) { + + this.readPreference = readPreference; + return this; + } + /** * Set the time limit for processing. * @@ -540,6 +648,12 @@ public AggregationOptions build() { if (domainTypeMapping != null) { options.domainTypeMapping = domainTypeMapping; } + if (readConcern != null) { + options.readConcern = Optional.of(readConcern); + } + if (readPreference != null) { + options.readPreference = Optional.of(readPreference); + } return options; } @@ -557,7 +671,7 @@ private enum ResultOptions { /** * Read the aggregation result from the cursor. */ - READ; + READ } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java index cf261d2b97..68662ec0df 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Predicate; @@ -34,6 +35,10 @@ public class AggregationPipeline { private final List pipeline; + public static AggregationPipeline of(AggregationOperation... stages) { + return new AggregationPipeline(Arrays.asList(stages)); + } + /** * Create an empty pipeline */ @@ -48,7 +53,7 @@ public AggregationPipeline() { */ public AggregationPipeline(List aggregationOperations) { - Assert.notNull(aggregationOperations, "AggregationOperations must not be null!"); + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); pipeline = new ArrayList<>(aggregationOperations); } @@ -60,7 +65,7 @@ public AggregationPipeline(List aggregationOperations) { */ public AggregationPipeline add(AggregationOperation aggregationOperation) { - Assert.notNull(aggregationOperation, "AggregationOperation must not be null!"); + Assert.notNull(aggregationOperation, "AggregationOperation must not be null"); pipeline.add(aggregationOperation); return this; @@ -100,11 +105,11 @@ void verify() { for (AggregationOperation operation : pipeline) { if (isOut(operation) && !isLast(operation)) { - throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline."); + throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline"); } if (isMerge(operation) && !isLast(operation)) { - throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline."); + throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline"); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java index 
5170986f93..438eb9e49f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2021 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -48,8 +48,8 @@ public class AggregationResults implements Iterable { */ public AggregationResults(List mappedResults, Document rawResults) { - Assert.notNull(mappedResults, "List of mapped results must not be null!"); - Assert.notNull(rawResults, "Raw results must not be null!"); + Assert.notNull(mappedResults, "List of mapped results must not be null"); + Assert.notNull(rawResults, "Raw results must not be null"); this.mappedResults = Collections.unmodifiableList(mappedResults); this.rawResults = rawResults; @@ -73,14 +73,10 @@ public List getMappedResults() { */ @Nullable public T getUniqueMappedResult() { - Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!"); + Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one"); return mappedResults.size() == 1 ? mappedResults.get(0) : null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ public Iterator iterator() { return mappedResults.iterator(); } @@ -109,6 +105,6 @@ public Document getRawResults() { private String parseServerUsed() { Object object = rawResults.get("serverUsed"); - return object instanceof String ? (String) object : null; + return object instanceof String stringValue ? 
stringValue : null; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java index 14fa8c48d1..1626d672bc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,15 +24,15 @@ * expression.
    *
    * Samples:
    - * *
    + * 
      * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
      * expressionOf("qty > 100 && qty < 250);
      *
      * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
      * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
    - * 
    * + * * * @author Christoph Strobl * @author Mark Paluch @@ -60,13 +60,10 @@ private AggregationSpELExpression(String rawExpression, Object[] parameters) { */ public static AggregationSpELExpression expressionOf(String expressionString, Object... parameters) { - Assert.notNull(expressionString, "ExpressionString must not be null!"); + Assert.notNull(expressionString, "ExpressionString must not be null"); return new AggregationSpELExpression(expressionString, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return (Document) TRANSFORMER.transform(rawExpression, context, parameters); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java index e69531e036..15d700309e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -71,8 +71,7 @@ * * @author Christoph Strobl * @author Mark Paluch - * @see MongoDB + * @see MongoDB * Reference Documentation * @since 3.0 */ @@ -98,10 +97,8 @@ protected AggregationUpdate(List pipeline) { super(pipeline); for (AggregationOperation operation : pipeline) { - if (operation instanceof FieldsExposingAggregationOperation) { - ((FieldsExposingAggregationOperation) operation).getFields().forEach(it -> { - keysTouched.add(it.getName()); - }); + if (operation instanceof FieldsExposingAggregationOperation exposingAggregationOperation) { + exposingAggregationOperation.getFields().forEach(it -> keysTouched.add(it.getName())); } } } @@ -134,7 +131,7 @@ public static AggregationUpdate from(List pipeline) { */ public AggregationUpdate set(SetOperation setOperation) { - Assert.notNull(setOperation, "SetOperation must not be null!"); + Assert.notNull(setOperation, "SetOperation must not be null"); setOperation.getFields().forEach(it -> { keysTouched.add(it.getName()); @@ -153,7 +150,7 @@ public AggregationUpdate set(SetOperation setOperation) { */ public AggregationUpdate unset(UnsetOperation unsetOperation) { - Assert.notNull(unsetOperation, "UnsetOperation must not be null!"); + Assert.notNull(unsetOperation, "UnsetOperation must not be null"); pipeline.add(unsetOperation); keysTouched.addAll(unsetOperation.removedFieldNames()); @@ -171,7 +168,7 @@ public AggregationUpdate unset(UnsetOperation unsetOperation) { */ public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) { - Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null!"); + Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null"); pipeline.add(replaceWithOperation); return this; } @@ -184,7 +181,7 @@ public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) */ public AggregationUpdate replaceWith(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); 
return replaceWith(ReplaceWithOperation.replaceWithValue(value)); } @@ -198,7 +195,7 @@ public AggregationUpdate replaceWith(Object value) { */ public SetValueAppender set(String key) { - Assert.notNull(key, "Key must not be null!"); + Assert.notNull(key, "Key must not be null"); return new SetValueAppender() { @@ -210,7 +207,7 @@ public AggregationUpdate toValue(@Nullable Object value) { @Override public AggregationUpdate toValueOf(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return set(SetOperation.builder().set(key).toValueOf(value)); } }; @@ -224,8 +221,8 @@ public AggregationUpdate toValueOf(Object value) { */ public AggregationUpdate unset(String... keys) { - Assert.notNull(keys, "Keys must not be null!"); - Assert.noNullElements(keys, "Keys must not contain null elements."); + Assert.notNull(keys, "Keys must not be null"); + Assert.noNullElements(keys, "Keys must not contain null elements"); return unset(new UnsetOperation(Arrays.stream(keys).map(Fields::field).collect(Collectors.toList()))); } @@ -243,48 +240,26 @@ public AggregationUpdate isolated() { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#isIsolated() - */ @Override public Boolean isIsolated() { return isolated; } - /* - * Returns a update document containing the update pipeline. - * The resulting document needs to be unwrapped to be used with update operations. 
- * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getUpdateObject() - */ @Override public Document getUpdateObject() { return new Document("", toPipeline(Aggregation.DEFAULT_CONTEXT)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#modifies(java.lang.String) - */ @Override public boolean modifies(String key) { return keysTouched.contains(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#inc(java.lang.String) - */ @Override public void inc(String key) { set(new SetOperation(key, ArithmeticOperators.valueOf(key).add(1))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters() - */ @Override public List getArrayFilters() { return Collections.emptyList(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java index 6a62379185..e84f7ed1b0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -36,12 +36,12 @@ interface AggregationUtils { */ static List toRangeValues(Range range) { - Assert.notNull(range, "Range must not be null!"); + Assert.notNull(range, "Range must not be null"); List result = new ArrayList(2); result.add(range.getLowerBound().getValue() - .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded!"))); - range.getUpperBound().getValue().ifPresent(it -> result.add(it)); + .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded"))); + range.getUpperBound().getValue().ifPresent(result::add); return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java new file mode 100644 index 0000000000..ed79202345 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * A special field that points to a variable {@code $$} expression. 
+ * + * @author Christoph Strobl + * @since 4.1.3 + */ +public interface AggregationVariable extends Field { + + String PREFIX = "$$"; + + /** + * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget() + * target}. + */ + @Override + default boolean isAliased() { + return !ObjectUtils.nullSafeEquals(getName(), getTarget()); + } + + @Override + default String getName() { + return getTarget(); + } + + @Override + default boolean isInternal() { + return false; + } + + /** + * Create a new {@link AggregationVariable} for the given name. + *

    + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable variable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + }; + } + + /** + * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name. + *

    + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable localVariable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + + @Override + public boolean isInternal() { + return true; + } + }; + } + + /** + * Check if the given field name reference may be variable. + * + * @param fieldRef can be {@literal null}. + * @return true if given value matches the variable identification pattern. + */ + static boolean isVariable(@Nullable String fieldRef) { + return fieldRef != null && fieldRef.stripLeading().matches("^\\$\\$\\w.*"); + } + + /** + * Check if the given field may be variable. + * + * @param field can be {@literal null}. + * @return true if given {@link Field field} is an {@link AggregationVariable} or if its value is a + * {@link #isVariable(String) variable}. + */ + static boolean isVariable(Field field) { + + if (field instanceof AggregationVariable) { + return true; + } + return isVariable(field.getTarget()); + } + + private static String prefixVariable(String variable) { + + var trimmed = variable.stripLeading(); + return trimmed.startsWith(PREFIX) ? 
trimmed : (PREFIX + trimmed); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 6053f3ae1b..e2c31c6346 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,19 +17,33 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Median; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; 
+import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. * * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Julia Lee * @since 1.10 */ public class ArithmeticOperators { @@ -54,6 +68,17 @@ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression return new ArithmeticOperatorFactory(expression); } + /** + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. + * + * @return new instance of {@link Rand}. + * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + /** * @author Christoph Strobl */ @@ -69,7 +94,7 @@ public static class ArithmeticOperatorFactory { */ public ArithmeticOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -81,7 +106,7 @@ public ArithmeticOperatorFactory(String fieldReference) { */ public ArithmeticOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -104,7 +129,7 @@ public Abs abs() { */ public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAdd().add(fieldReference); } @@ -117,7 +142,7 @@ public Add add(String fieldReference) { */ public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + 
Assert.notNull(expression, "Expression must not be null"); return createAdd().add(expression); } @@ -129,7 +154,7 @@ public Add add(AggregationExpression expression) { */ public Add add(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createAdd().add(value); } @@ -139,7 +164,7 @@ private Add createAdd() { /** * Creates new {@link AggregationExpression} that returns the smallest integer greater than or equal to the - * assoicated number. + * associated number. * * @return new instance of {@link Ceil}. */ @@ -148,7 +173,47 @@ public Ceil ceil() { } /** - * Creates new {@link AggregationExpression} that ivides the associated number by number referenced via + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? 
Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by number referenced via * {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. @@ -156,7 +221,7 @@ public Ceil ceil() { */ public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createDivide().divideBy(fieldReference); } @@ -169,7 +234,7 @@ public Divide divideBy(String fieldReference) { */ public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createDivide().divideBy(expression); } @@ -181,7 +246,7 @@ public Divide divideBy(AggregationExpression expression) { */ public Divide divideBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createDivide().divideBy(value); } @@ -209,7 +274,46 @@ public Floor floor() { } /** - * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the assoicated + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral() { + return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. 
+ * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty"); + + return integral().unit(unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the associated * number. * * @return new instance of {@link Ln}. @@ -227,7 +331,7 @@ public Ln ln() { */ public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createLog().log(fieldReference); } @@ -240,7 +344,7 @@ public Log log(String fieldReference) { */ public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createLog().log(fieldReference); } @@ -253,7 +357,7 @@ public Log log(AggregationExpression expression) { */ public Log log(Number base) { - Assert.notNull(base, "Base must not be null!"); + Assert.notNull(base, "Base must not be null"); return createLog().log(base); } @@ -279,7 +383,7 @@ public Log10 log10() { */ public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return 
createMod().mod(fieldReference); } @@ -292,7 +396,7 @@ public Mod mod(String fieldReference) { */ public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMod().mod(expression); } @@ -305,7 +409,7 @@ public Mod mod(AggregationExpression expression) { */ public Mod mod(Number value) { - Assert.notNull(value, "Base must not be null!"); + Assert.notNull(value, "Base must not be null"); return createMod().mod(value); } @@ -321,7 +425,7 @@ private Mod createMod() { */ public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createMultiply().multiplyBy(fieldReference); } @@ -333,7 +437,7 @@ public Multiply multiplyBy(String fieldReference) { */ public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMultiply().multiplyBy(expression); } @@ -345,7 +449,7 @@ public Multiply multiplyBy(AggregationExpression expression) { */ public Multiply multiplyBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createMultiply().multiplyBy(value); } @@ -361,7 +465,7 @@ private Multiply createMultiply() { */ public Pow pow(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createPow().pow(fieldReference); } @@ -373,7 +477,7 @@ public Pow pow(String fieldReference) { */ public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createPow().pow(expression); } @@ -385,7 
+489,7 @@ public Pow pow(AggregationExpression expression) { */ public Pow pow(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createPow().pow(value); } @@ -410,7 +514,7 @@ public Sqrt sqrt() { */ public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createSubtract().subtract(fieldReference); } @@ -422,7 +526,7 @@ public Subtract subtract(String fieldReference) { */ public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createSubtract().subtract(expression); } @@ -434,7 +538,7 @@ public Subtract subtract(AggregationExpression expression) { */ public Subtract subtract(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createSubtract().subtract(value); } @@ -511,6 +615,63 @@ public StdDevSamp stdDevSamp() { : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + /** * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal * place. @@ -532,6 +693,274 @@ public Round roundToPlace(int place) { return round().place(place); } + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sin}.
+ * @since 3.3 + */ + public Sin sin() { + return sin(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin(AngularUnit unit) { + return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh(AngularUnit unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + * @since 3.3 + */ + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + * @since 3.3 + */ + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cos}.
+ * @since 3.3 + */ + public Cos cos() { + return cos(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Cos}. + * @since 3.3 + */ + public Cos cos(AngularUnit unit) { + return usesFieldRef() ? Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh(AngularUnit unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse cosine of a numeric value. + * + * @return new instance of {@link ACos}. + * @since 3.4 + */ + public ACos acos() { + return usesFieldRef() ? ACos.acosOf(fieldReference) : ACos.acosOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a numeric value. + * + * @return new instance of {@link ACosh}. + * @since 3.4 + */ + public ACosh acosh() { + return usesFieldRef() ? ACosh.acoshOf(fieldReference) : ACosh.acoshOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. 
+ * @since 3.3 + */ + public Tan tan() { + return tan(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. + * @since 3.3 + */ + public ATan atan() { + return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given numeric value in the argument. + * + * @param value the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given field reference in the argument. + * + * @param fieldReference the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given {@link AggregationExpression} in the argument. + * + * @param expression the expression evaluating to a numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createATan2().atan2of(expression); + } + + private ATan2 createATan2() { + + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value.
+ * + * @return new instance of {@link ATanh}. + * @since 3.3 + */ + public ATanh atanh() { + return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan(AngularUnit unit) { + return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + public Tanh tanh(AngularUnit unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @param percentages must not be {@literal null}. + * @return new instance of {@link Percentile}. + * @since 4.2 + */ + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? AccumulatorOperators.Percentile.percentileOf(fieldReference) + : AccumulatorOperators.Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + public Median median() { + return usesFieldRef() ?
AccumulatorOperators.Median.medianOf(fieldReference) + : AccumulatorOperators.Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -561,7 +990,7 @@ protected String getMongoMethod() { */ public static Abs absoluteValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Abs(Fields.field(fieldReference)); } @@ -573,7 +1002,7 @@ public static Abs absoluteValueOf(String fieldReference) { */ public static Abs absoluteValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Abs(expression); } @@ -585,7 +1014,7 @@ public static Abs absoluteValueOf(AggregationExpression expression) { */ public static Abs absoluteValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Abs(value); } } @@ -614,7 +1043,7 @@ protected String getMongoMethod() { */ public static Add valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(asFields(fieldReference)); } @@ -626,7 +1055,7 @@ public static Add valueOf(String fieldReference) { */ public static Add valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(Collections.singletonList(expression)); } @@ -638,7 +1067,7 @@ public static Add valueOf(AggregationExpression expression) { */ public static Add valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Add(Collections.singletonList(value)); } @@ -650,7 +1079,7 @@ public static Add 
valueOf(Number value) { */ public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(append(Fields.field(fieldReference))); } @@ -662,7 +1091,7 @@ public Add add(String fieldReference) { */ public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(append(expression)); } @@ -701,7 +1130,7 @@ protected String getMongoMethod() { */ public static Ceil ceilValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ceil(Fields.field(fieldReference)); } @@ -713,7 +1142,7 @@ public static Ceil ceilValueOf(String fieldReference) { */ public static Ceil ceilValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ceil(expression); } @@ -725,7 +1154,7 @@ public static Ceil ceilValueOf(AggregationExpression expression) { */ public static Ceil ceilValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ceil(value); } } @@ -754,7 +1183,7 @@ protected String getMongoMethod() { */ public static Divide valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(asFields(fieldReference)); } @@ -766,7 +1195,7 @@ public static Divide valueOf(String fieldReference) { */ public static Divide valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new 
Divide(Collections.singletonList(expression)); } @@ -778,7 +1207,7 @@ public static Divide valueOf(AggregationExpression expression) { */ public static Divide valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Divide(Collections.singletonList(value)); } @@ -790,7 +1219,7 @@ public static Divide valueOf(Number value) { */ public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(append(Fields.field(fieldReference))); } @@ -802,7 +1231,7 @@ public Divide divideBy(String fieldReference) { */ public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Divide(append(expression)); } @@ -841,7 +1270,7 @@ protected String getMongoMethod() { */ public static Exp expValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Exp(Fields.field(fieldReference)); } @@ -853,7 +1282,7 @@ public static Exp expValueOf(String fieldReference) { */ public static Exp expValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Exp(expression); } @@ -865,7 +1294,7 @@ public static Exp expValueOf(AggregationExpression expression) { */ public static Exp expValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Exp(value); } } @@ -894,7 +1323,7 @@ protected String getMongoMethod() { */ public static Floor floorValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); 
+ Assert.notNull(fieldReference, "FieldReference must not be null"); return new Floor(Fields.field(fieldReference)); } @@ -906,7 +1335,7 @@ public static Floor floorValueOf(String fieldReference) { */ public static Floor floorValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Floor(expression); } @@ -918,7 +1347,7 @@ public static Floor floorValueOf(AggregationExpression expression) { */ public static Floor floorValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Floor(value); } } @@ -947,7 +1376,7 @@ protected String getMongoMethod() { */ public static Ln lnValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ln(Fields.field(fieldReference)); } @@ -959,7 +1388,7 @@ public static Ln lnValueOf(String fieldReference) { */ public static Ln lnValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ln(expression); } @@ -971,7 +1400,7 @@ public static Ln lnValueOf(AggregationExpression expression) { */ public static Ln lnValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ln(value); } } @@ -1000,7 +1429,7 @@ protected String getMongoMethod() { */ public static Log valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(asFields(fieldReference)); } @@ -1012,7 +1441,7 @@ public static Log valueOf(String fieldReference) { */ public static Log valueOf(AggregationExpression expression) { - 
Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(Collections.singletonList(expression)); } @@ -1024,7 +1453,7 @@ public static Log valueOf(AggregationExpression expression) { */ public static Log valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log(Collections.singletonList(value)); } @@ -1036,7 +1465,7 @@ public static Log valueOf(Number value) { */ public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(append(Fields.field(fieldReference))); } @@ -1048,7 +1477,7 @@ public Log log(String fieldReference) { */ public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(append(expression)); } @@ -1087,7 +1516,7 @@ protected String getMongoMethod() { */ public static Log10 log10ValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log10(Fields.field(fieldReference)); } @@ -1099,7 +1528,7 @@ public static Log10 log10ValueOf(String fieldReference) { */ public static Log10 log10ValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log10(expression); } @@ -1111,7 +1540,7 @@ public static Log10 log10ValueOf(AggregationExpression expression) { */ public static Log10 log10ValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log10(value); } } @@ -1140,7 +1569,7 @@ protected String getMongoMethod() { */ public 
static Mod valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(asFields(fieldReference)); } @@ -1152,7 +1581,7 @@ public static Mod valueOf(String fieldReference) { */ public static Mod valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(Collections.singletonList(expression)); } @@ -1164,7 +1593,7 @@ public static Mod valueOf(AggregationExpression expression) { */ public static Mod valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Mod(Collections.singletonList(value)); } @@ -1176,7 +1605,7 @@ public static Mod valueOf(Number value) { */ public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(append(Fields.field(fieldReference))); } @@ -1188,7 +1617,7 @@ public Mod mod(String fieldReference) { */ public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(append(expression)); } @@ -1227,7 +1656,7 @@ protected String getMongoMethod() { */ public static Multiply valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(asFields(fieldReference)); } @@ -1239,7 +1668,7 @@ public static Multiply valueOf(String fieldReference) { */ public static Multiply valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new 
Multiply(Collections.singletonList(expression)); } @@ -1251,7 +1680,7 @@ public static Multiply valueOf(AggregationExpression expression) { */ public static Multiply valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Multiply(Collections.singletonList(value)); } @@ -1263,7 +1692,7 @@ public static Multiply valueOf(Number value) { */ public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(append(Fields.field(fieldReference))); } @@ -1275,7 +1704,7 @@ public Multiply multiplyBy(String fieldReference) { */ public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Multiply(append(expression)); } @@ -1314,7 +1743,7 @@ protected String getMongoMethod() { */ public static Pow valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(asFields(fieldReference)); } @@ -1326,7 +1755,7 @@ public static Pow valueOf(String fieldReference) { */ public static Pow valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(Collections.singletonList(expression)); } @@ -1338,7 +1767,7 @@ public static Pow valueOf(AggregationExpression expression) { */ public static Pow valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Pow(Collections.singletonList(value)); } @@ -1350,7 +1779,7 @@ public static Pow valueOf(Number value) { */ public Pow pow(String fieldReference) { - 
Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(append(Fields.field(fieldReference))); } @@ -1362,7 +1791,7 @@ public Pow pow(String fieldReference) { */ public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(append(expression)); } @@ -1401,7 +1830,7 @@ protected String getMongoMethod() { */ public static Sqrt sqrtOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sqrt(Fields.field(fieldReference)); } @@ -1413,7 +1842,7 @@ public static Sqrt sqrtOf(String fieldReference) { */ public static Sqrt sqrtOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sqrt(expression); } @@ -1425,7 +1854,7 @@ public static Sqrt sqrtOf(AggregationExpression expression) { */ public static Sqrt sqrtOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Sqrt(value); } } @@ -1454,7 +1883,7 @@ protected String getMongoMethod() { */ public static Subtract valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(asFields(fieldReference)); } @@ -1466,7 +1895,7 @@ public static Subtract valueOf(String fieldReference) { */ public static Subtract valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(Collections.singletonList(expression)); } @@ -1478,7 +1907,7 @@ public static 
Subtract valueOf(AggregationExpression expression) { */ public static Subtract valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Subtract(Collections.singletonList(value)); } @@ -1490,7 +1919,7 @@ public static Subtract valueOf(Number value) { */ public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(append(Fields.field(fieldReference))); } @@ -1502,7 +1931,7 @@ public Subtract subtract(String fieldReference) { */ public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(append(expression)); } @@ -1541,7 +1970,7 @@ protected String getMongoMethod() { */ public static Trunc truncValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Trunc(Fields.field(fieldReference)); } @@ -1553,7 +1982,7 @@ public static Trunc truncValueOf(String fieldReference) { */ public static Trunc truncValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Trunc(expression); } @@ -1565,7 +1994,7 @@ public static Trunc truncValueOf(AggregationExpression expression) { */ public static Trunc truncValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Trunc(value); } } @@ -1597,7 +2026,7 @@ private Round(Object value) { */ public static Round roundValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, 
"FieldReference must not be null"); return new Round(Collections.singletonList(Fields.field(fieldReference))); } @@ -1609,7 +2038,7 @@ public static Round roundValueOf(String fieldReference) { */ public static Round roundValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Round(Collections.singletonList(expression)); } @@ -1621,7 +2050,7 @@ public static Round roundValueOf(AggregationExpression expression) { */ public static Round round(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Round(Collections.singletonList(value)); } @@ -1643,7 +2072,7 @@ public Round place(int place) { */ public Round placeOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Round(append(expression)); } @@ -1656,7 +2085,7 @@ public Round placeOf(AggregationExpression expression) { */ public Round placeOf(String fieldReference) { - Assert.notNull(fieldReference, "fieldReference must not be null!"); + Assert.notNull(fieldReference, "fieldReference must not be null"); return new Round(append(Fields.field(fieldReference))); } @@ -1665,4 +2094,1128 @@ protected String getMongoMethod() { return "$round"; } } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. 
+ */ + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. 
+ */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + */ + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } + + /** + * The unit of measure for computations that operate upon angles. + * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularUnit { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
    + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *

    +		 * { $sinh : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularUnit unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Sin}. 
+ */ + public static Sin sin(Object value) { + return sin(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + *
    + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
    +		 * { $sinh : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
    + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. 
+ */ + public static Sinh sinh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + *
    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + *
    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
    + * Use {@code cosOf("angle", DEGREES)} as shortcut for + * + *
    +		 * { $cos : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularUnit unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Cos}. 
+ */ + public static Cos cos(Object value) { + return cos(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + *
    + * Use {@code coshOf("angle", DEGREES)} as shortcut for + * + *
    +		 * { $cosh : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularUnit unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
    + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh cosh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACos extends AbstractAggregationExpression { + + private ACos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ACos(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + *
    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(AggregationExpression expression) { + return new ACos(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(Number value) { + return new ACos(value); + } + + @Override + protected String getMongoMethod() { + return "$acos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACosh extends AbstractAggregationExpression { + + private ACosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(String fieldReference) { + return new ACosh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + *
    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(AggregationExpression expression) { + return new ACosh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(Object value) { + return new ACosh(value); + } + + @Override + protected String getMongoMethod() { + return "$acosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
    + * Use {@code tanOf("angle", DEGREES)} as shortcut for + * + *
    +		 * { $tan : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularUnit unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Tan}. 
+ */ + public static Tan tan(Object value) { + return tan(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. 
+ * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2((Collections.singletonList(expression))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. 
+ * + * @param fieldReference anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2(append(expression)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param value of type {@link Number} + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(Number value) { + return new ATan2(append(value)); + } + + @Override + protected String getMongoMethod() { + return "$atan2"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tanh extends AbstractAggregationExpression { + + private Tanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanhOf(String fieldReference) { + return tanhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + *
    + * Use {@code tanhOf("angle", DEGREES)} as shortcut for + * + *
    +		 * { $tanh : { $degreesToRadians : "$angle" } }
    +		 * 
    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
    + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression) { + return tanhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) { + return tanh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanh(Object value) { + return tanh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tanh(value); + } + + @Override + protected String getMongoMethod() { + return "$tanh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATanh extends AbstractAggregationExpression { + + private ATanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(String fieldReference) { + return new ATanh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. + *
    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(AggregationExpression expression) { + return new ATanh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(Object value) { + return new ATanh(value); + } + + @Override + protected String getMongoMethod() { + return "$atanh"; + } + } + + /** + * {@link Rand} returns a floating value between 0 and 1. + * + * @author Mushtaq Ahmed + * @since 3.3 + */ + public static class Rand implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rand", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java index dba40d94d5..a8cb58d17c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -23,6 +23,8 @@ import org.bson.Document; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; @@ -35,6 +37,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author Shashank Sharma + * @author Divya Srivastava * @since 1.0 */ public class ArrayOperators { @@ -77,7 +80,7 @@ public static class ArrayOperatorFactory { private final @Nullable String fieldReference; private final @Nullable AggregationExpression expression; - private final @Nullable Collection values; + private final @Nullable Collection values; /** * Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}. @@ -86,7 +89,7 @@ public static class ArrayOperatorFactory { */ public ArrayOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; this.values = null; @@ -99,7 +102,7 @@ public ArrayOperatorFactory(String fieldReference) { */ public ArrayOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; this.values = null; @@ -113,7 +116,7 @@ public ArrayOperatorFactory(AggregationExpression expression) { */ public ArrayOperatorFactory(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); this.fieldReference = null; this.expression = null; this.values = values; @@ -139,7 +142,7 @@ public 
ArrayElemAt elementAt(int position) { */ public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createArrayElemAt().elementAt(expression); } @@ -152,7 +155,7 @@ public ArrayElemAt elementAt(AggregationExpression expression) { */ public ArrayElemAt elementAt(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createArrayElemAt().elementAt(fieldReference); } @@ -174,7 +177,7 @@ private ArrayElemAt createArrayElemAt() { */ public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return createConcatArrays().concat(arrayFieldReference); } @@ -187,7 +190,7 @@ public ConcatArrays concat(String arrayFieldReference) { */ public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createConcatArrays().concat(expression); } @@ -212,7 +215,11 @@ public AsBuilder filter() { return Filter.filter(fieldReference); } - Assert.state(values != null, "Values must not be null!"); + if (usesExpression()) { + return Filter.filter(expression); + } + + Assert.state(values != null, "Values must not be null"); return Filter.filter(new ArrayList<>(values)); } @@ -223,7 +230,7 @@ public AsBuilder filter() { */ public IsArray isArray() { - Assert.state(values == null, "Does it make sense to call isArray on an array? Maybe just skip it?"); + Assert.state(values == null, "Does it make sense to call isArray on an array; Maybe just skip it"); return usesFieldRef() ? 
IsArray.isArray(fieldReference) : IsArray.isArray(expression); } @@ -314,6 +321,38 @@ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression. .withInitialValue(initialValue).reduce(expressions); } + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort + * order}. + * + * @return new instance of {@link SortArray}. + * @since 4.0 + */ + public SortArray sort(Sort sort) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).by(sort); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).by(sort); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given + * {@link Direction order}. + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray sort(Direction direction) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).direction(direction); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).direction(direction); + } + /** * Creates new {@link AggregationExpression} that transposes an array of input arrays so that the first element of * the output array would be an array containing, the first element of the first input array, the first element of @@ -363,6 +402,38 @@ public ArrayToObject toObject() { return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values); } + /** + * Creates new {@link AggregationExpression} that return the first element in the associated array. + * NOTE: Requires MongoDB 4.4 or later. + * + * @return new instance of {@link First}. + * @since 3.4 + */ + public First first() { + + if (usesFieldRef()) { + return First.firstOf(fieldReference); + } + + return usesExpression() ? 
First.firstOf(expression) : First.first(values); + } + + /** + * Creates new {@link AggregationExpression} that return the last element in the given array. NOTE: + * Requires MongoDB 4.4 or later. + * + * @return new instance of {@link Last}. + * @since 3.4 + */ + public Last last() { + + if (usesFieldRef()) { + return Last.lastOf(fieldReference); + } + + return usesExpression() ? Last.lastOf(expression) : Last.last(values); + } + /** * @author Christoph Strobl */ @@ -417,7 +488,7 @@ protected String getMongoMethod() { */ public static ArrayElemAt arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ArrayElemAt(asFields(fieldReference)); } @@ -429,7 +500,7 @@ public static ArrayElemAt arrayOf(String fieldReference) { */ public static ArrayElemAt arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(Collections.singletonList(expression)); } @@ -442,7 +513,7 @@ public static ArrayElemAt arrayOf(AggregationExpression expression) { */ public static ArrayElemAt arrayOf(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new ArrayElemAt(Collections.singletonList(values)); } @@ -464,19 +535,19 @@ public ArrayElemAt elementAt(int index) { */ public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(append(expression)); } /** - * Use the element at the index number traken from the given field. + * Use the element at the index number taken from the given field. * * @param arrayFieldReference the field name. * @return new instance of {@link ArrayElemAt}. 
*/ public ArrayElemAt elementAt(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayReference must not be null"); return new ArrayElemAt(append(Fields.field(arrayFieldReference))); } } @@ -505,7 +576,7 @@ protected String getMongoMethod() { */ public static ConcatArrays arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ConcatArrays(asFields(fieldReference)); } @@ -517,7 +588,7 @@ public static ConcatArrays arrayOf(String fieldReference) { */ public static ConcatArrays arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(Collections.singletonList(expression)); } @@ -530,7 +601,7 @@ public static ConcatArrays arrayOf(AggregationExpression expression) { */ public static ConcatArrays arrayOf(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new ConcatArrays(Collections.singletonList(values)); } @@ -542,7 +613,7 @@ public static ConcatArrays arrayOf(Collection values) { */ public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return new ConcatArrays(append(Fields.field(arrayFieldReference))); } @@ -554,7 +625,7 @@ public ConcatArrays concat(String arrayFieldReference) { */ public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(append(expression)); } } @@ -584,7 +655,7 @@ private Filter() { */ public static 
AsBuilder filter(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return filter(Fields.field(field)); } @@ -596,10 +667,23 @@ public static AsBuilder filter(String field) { */ public static AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new FilterExpressionBuilder().filter(field); } + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + public static AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Field must not be null"); + return new FilterExpressionBuilder().filter(expression); + } + /** * Set the {@literal values} to apply the {@code $filter} to. * @@ -608,14 +692,10 @@ public static AsBuilder filter(Field field) { */ public static AsBuilder filter(List values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new FilterExpressionBuilder().filter(values); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(final AggregationOperationContext context) { return toFilter(ExposedFields.from(as), context); @@ -624,8 +704,7 @@ public Document toDocument(final AggregationOperationContext context) { private Document toFilter(ExposedFields exposedFields, AggregationOperationContext context) { Document filterExpression = new Document(); - InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext( - exposedFields, context); + AggregationOperationContext operationContext = 
context.inheritAndExpose(exposedFields); filterExpression.putAll(context.getMappedObject(new Document("input", getMappedInput(context)))); filterExpression.put("as", as.getTarget()); @@ -636,18 +715,27 @@ private Document toFilter(ExposedFields exposedFields, AggregationOperationConte } private Object getMappedInput(AggregationOperationContext context) { - return input instanceof Field ? context.getReference((Field) input).toString() : input; + + if (input instanceof Field field) { + return context.getReference(field).toString(); + } + + if (input instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + + return input; } private Object getMappedCondition(AggregationOperationContext context) { - if (!(condition instanceof AggregationExpression)) { + if (!(condition instanceof AggregationExpression aggregationExpression)) { return condition; } NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext( context, Collections.singleton(as)); - return ((AggregationExpression) condition).toDocument(nea); + return aggregationExpression.toDocument(nea); } /** @@ -670,6 +758,15 @@ public interface InputBuilder { * @return */ AsBuilder filter(Field field); + + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. 
+ * @return + * @since 4.1.1 + */ + AsBuilder filter(AggregationExpression expression); } /** @@ -736,74 +833,58 @@ public static InputBuilder newBuilder() { return new FilterExpressionBuilder(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(java.util.List) - */ @Override public AsBuilder filter(List array) { - Assert.notNull(array, "Array must not be null!"); - filter.input = new ArrayList(array); + Assert.notNull(array, "Array must not be null"); + filter.input = new ArrayList<>(array); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(org.springframework.data.mongodb.core.aggregation.Field) - */ @Override public AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); filter.input = field; return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder#as(java.lang.String) - */ + @Override + public AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + filter.input = expression; + return this; + } + @Override public ConditionBuilder as(String variableName) { - Assert.notNull(variableName, "Variable name must not be null!"); + Assert.notNull(variableName, "Variable name must not be null"); filter.as = new ExposedField(variableName, true); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public Filter by(AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); filter.condition = condition; return filter; } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(java.lang.String) - */ @Override public Filter by(String expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.bson.Document) - */ @Override public Filter by(Document expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } @@ -834,7 +915,7 @@ protected String getMongoMethod() { */ public static IsArray isArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IsArray(Fields.field(fieldReference)); } @@ -846,7 +927,7 @@ public static IsArray isArray(String fieldReference) { */ public static IsArray isArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IsArray(expression); } } @@ -875,7 +956,7 @@ protected String getMongoMethod() { */ public static Size lengthOfArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Size(Fields.field(fieldReference)); } @@ -887,7 +968,7 @@ public static Size lengthOfArray(String fieldReference) { */ public static Size lengthOfArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Size(expression); } @@ -900,7 +981,7 @@ public static Size 
lengthOfArray(AggregationExpression expression) { */ public static Size lengthOfArray(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new Size(Collections.singletonList(values)); } } @@ -929,7 +1010,7 @@ protected String getMongoMethod() { */ public static Slice sliceArrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Slice(asFields(fieldReference)); } @@ -941,7 +1022,7 @@ public static Slice sliceArrayOf(String fieldReference) { */ public static Slice sliceArrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Slice(Collections.singletonList(expression)); } @@ -954,18 +1035,30 @@ public static Slice sliceArrayOf(AggregationExpression expression) { */ public static Slice sliceArrayOf(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new Slice(Collections.singletonList(values)); } /** * Slice the number of elements. * - * @param nrElements elements to slice. + * @param count number of elements to slice. * @return new instance of {@link Slice}. */ - public Slice itemCount(int nrElements) { - return new Slice(append(nrElements)); + public Slice itemCount(int count) { + return new Slice(append(count)); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. 
+ * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(count)); } /** @@ -974,29 +1067,52 @@ public Slice itemCount(int nrElements) { * @param position the start position * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. */ - public SliceElementsBuilder offset(final int position) { - - return new SliceElementsBuilder() { + public SliceElementsBuilder offset(int position) { + return new SliceElementsBuilder(position); + } - @Override - public Slice itemCount(int nrElements) { - return new Slice(append(position)).itemCount(nrElements); - } - }; + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. + */ + public SliceElementsBuilder offset(AggregationExpression position) { + return new SliceElementsBuilder(position); } /** * @author Christoph Strobl */ - public interface SliceElementsBuilder { + public class SliceElementsBuilder { + + private final Object position; + + SliceElementsBuilder(Object position) { + this.position = position; + } /** - * Set the number of elements given {@literal nrElements}. + * Set the number of elements given {@literal count}. * - * @param nrElements - * @return + * @param count number of elements to slice. + * @return new instance of {@link Slice}. */ - Slice itemCount(int nrElements); + public Slice itemCount(int count) { + return new Slice(append(position)).itemCount(count); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. 
+ * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(position)).itemCount(count); + } } } @@ -1024,7 +1140,7 @@ protected String getMongoMethod() { */ public static IndexOfArrayBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IndexOfArrayBuilder(Fields.field(fieldReference)); } @@ -1036,7 +1152,7 @@ public static IndexOfArrayBuilder arrayOf(String fieldReference) { */ public static IndexOfArrayBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IndexOfArrayBuilder(expression); } @@ -1049,7 +1165,7 @@ public static IndexOfArrayBuilder arrayOf(AggregationExpression expression) { */ public static IndexOfArrayBuilder arrayOf(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new IndexOfArrayBuilder(values); } @@ -1082,7 +1198,7 @@ private IndexOfArrayBuilder(Object targetArray) { */ public IndexOfArray indexOf(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new IndexOfArray(Arrays.asList(targetArray, value)); } } @@ -1244,9 +1360,6 @@ private Reduce(Object input, Object initialValue, List re this.reduceExpressions = reduceExpressions; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -1274,10 +1387,10 @@ private Object getMappedValue(Object value, AggregationOperationContext context) if (value instanceof Document) { return value; } - if (value instanceof 
AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } else if (value instanceof Field) { - return context.getReference(((Field) value)).toString(); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else if (value instanceof Field field) { + return context.getReference(field).toString(); } else { return context.getMappedObject(new Document("###val###", value)).get("###val###"); } @@ -1315,7 +1428,7 @@ public Reduce reduce(PropertyExpression... expressions) { Assert.notNull(expressions, "PropertyExpressions must not be null"); return new Reduce(Fields.field(fieldReference), initialValue, - Arrays.asList(expressions)); + Arrays. asList(expressions)); } }; } @@ -1409,8 +1522,8 @@ public static class PropertyExpression implements AggregationExpression { protected PropertyExpression(String propertyName, AggregationExpression aggregationExpression) { - Assert.notNull(propertyName, "Property name must not be null!"); - Assert.notNull(aggregationExpression, "AggregationExpression must not be null!"); + Assert.notNull(propertyName, "Property name must not be null"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); this.propertyName = propertyName; this.aggregationExpression = aggregationExpression; @@ -1433,9 +1546,6 @@ public PropertyExpression definedAs(AggregationExpression expression) { }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(propertyName, aggregationExpression.toDocument(context)); @@ -1456,24 +1566,15 @@ public interface AsBuilder { } } - public enum Variable implements Field { + public enum Variable implements AggregationVariable { THIS { - @Override - public String 
getName() { - return "$$this"; - } @Override public String getTarget() { return "$$this"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); @@ -1481,27 +1582,23 @@ public String toString() { }, VALUE { - @Override - public String getName() { - return "$$value"; - } @Override public String getTarget() { return "$$value"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); } }; + @Override + public boolean isInternal() { + return true; + } + /** * Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier. * eg. {@code $$value.product} @@ -1533,6 +1630,16 @@ public String toString() { } }; } + + public static boolean isVariable(Field field) { + + for (Variable var : values()) { + if (field.getTarget().startsWith(var.getTarget())) { + return true; + } + } + return false; + } } } @@ -1560,7 +1667,7 @@ protected String getMongoMethod() { */ public static ZipBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ZipBuilder(Fields.field(fieldReference)); } @@ -1572,7 +1679,7 @@ public static ZipBuilder arrayOf(String fieldReference) { */ public static ZipBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ZipBuilder(expression); } @@ -1585,7 +1692,7 @@ public static ZipBuilder arrayOf(AggregationExpression expression) { */ public static ZipBuilder arrayOf(Collection values) { - Assert.notNull(values, "Expression must not be null!"); + Assert.notNull(values, "Expression must not be null"); return new ZipBuilder(values); } @@ -1606,7 +1713,7 @@ public Zip useLongestLength() { */ public Zip defaultTo(String 
fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Zip(append("defaults", Fields.field(fieldReference))); } @@ -1618,7 +1725,7 @@ public Zip defaultTo(String fieldReference) { */ public Zip defaultTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Zip(append("defaults", expression)); } @@ -1630,7 +1737,7 @@ public Zip defaultTo(AggregationExpression expression) { */ public Zip defaultTo(Object[] array) { - Assert.notNull(array, "Array must not be null!"); + Assert.notNull(array, "Array must not be null"); return new Zip(append("defaults", Arrays.asList(array))); } @@ -1640,7 +1747,7 @@ public static class ZipBuilder { private ZipBuilder(Object sourceArray) { - this.sourceArrays = new ArrayList(); + this.sourceArrays = new ArrayList<>(); this.sourceArrays.add(sourceArray); } @@ -1654,17 +1761,17 @@ private ZipBuilder(Object sourceArray) { */ public Zip zip(Object... arrays) { - Assert.notNull(arrays, "Arrays must not be null!"); + Assert.notNull(arrays, "Arrays must not be null"); for (Object value : arrays) { - if (value instanceof String) { - sourceArrays.add(Fields.field((String) value)); + if (value instanceof String stringValue) { + sourceArrays.add(Fields.field(stringValue)); } else { sourceArrays.add(value); } } - return new Zip(Collections.singletonMap("inputs", sourceArrays)); + return new Zip(Collections.singletonMap("inputs", sourceArrays)); } } } @@ -1675,7 +1782,7 @@ public Zip zip(Object... 
arrays) { * @author Christoph Strobl * @author Shashank Sharma * @see https://docs.mongodb.com/manual/reference/operator/aggregation/in/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/ * @since 2.2 */ public static class In extends AbstractAggregationExpression { @@ -1697,11 +1804,11 @@ protected String getMongoMethod() { */ public static InBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return value -> { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new In(Arrays.asList(value, Fields.field(fieldReference))); }; } @@ -1714,11 +1821,11 @@ public static InBuilder arrayOf(String fieldReference) { */ public static InBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return value -> { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new In(Arrays.asList(value, expression)); }; @@ -1733,11 +1840,11 @@ public static InBuilder arrayOf(AggregationExpression expression) { */ public static InBuilder arrayOf(Collection values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return value -> { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new In(Arrays.asList(value, values)); }; @@ -1764,7 +1871,7 @@ public interface InBuilder { * * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ + * 
"https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ * @since 2.1 */ public static class ArrayToObject extends AbstractAggregationExpression { @@ -1803,13 +1910,200 @@ public static ArrayToObject arrayValueOfToObject(AggregationExpression expressio return new ArrayToObject(expression); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AbstractAggregationExpression#getMongoMethod() - */ @Override protected String getMongoMethod() { return "$arrayToObject"; } } + + /** + * {@link AggregationExpression} for {@code $first} that returns the first element in an array.
    + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class First extends AbstractAggregationExpression { + + private First(Object value) { + super(value); + } + + /** + * Returns the first element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First first(Object array) { + return new First(array); + } + + /** + * Returns the first element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(String fieldReference) { + return new First(Fields.field(fieldReference)); + } + + /** + * Returns the first element of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(AggregationExpression expression) { + return new First(expression); + } + + @Override + protected String getMongoMethod() { + return "$first"; + } + } + + /** + * {@link AggregationExpression} for {@code $last} that returns the last element in an array.
    + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class Last extends AbstractAggregationExpression { + + private Last(Object value) { + super(value); + } + + /** + * Returns the last element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last last(Object array) { + return new Last(array); + } + + /** + * Returns the last element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(String fieldReference) { + return new Last(Fields.field(fieldReference)); + } + + /** + * Returns the last element of the array computed buy the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(AggregationExpression expression) { + return new Last(expression); + } + + @Override + protected String getMongoMethod() { + return "$last"; + } + } + + /** + * {@link AggregationExpression} for {@code $sortArray} that sorts elements in an array.
    + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class SortArray extends AbstractAggregationExpression { + + private SortArray(Object value) { + super(value); + } + + /** + * Returns the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArray(Object array) { + return new SortArray(Collections.singletonMap("input", array)); + } + + /** + * Sorts the elements in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(String fieldReference) { + return sortArray(Fields.field(fieldReference)); + } + + /** + * Sorts the elements of the array computed buy the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(AggregationExpression expression) { + return sortArray(expression); + } + + /** + * Set the order to put elements in. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public SortArray by(Sort sort) { + return new SortArray(append("sortBy", sort)); + } + + /** + * Order the values for the array in the given direction. + * + * @param direction must not be {@literal null}. + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray direction(Direction direction) { + return new SortArray(append("sortBy", direction.isAscending() ? 1 : -1)); + } + + /** + * Sort the array elements by their values in ascending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. 
+ * @since 4.5 + */ + public SortArray byValueAscending() { + return direction(Direction.ASC); + } + + /** + * Sort the array elements by their values in descending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray byValueDescending() { + return direction(Direction.DESC); + } + + @Override + protected String getMongoMethod() { + return "$sortArray"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java new file mode 100644 index 0000000000..4d321c4715 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.bson.conversions.Bson; + +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.ObjectUtils; + +/** + * {@link AggregationOperation} implementation that can return a {@link Document} from a {@link Bson} or {@link String} + * document. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +record BasicAggregationOperation(Object value) implements AggregationOperation { + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (value instanceof Bson bson) { + return BsonUtils.asDocument(bson, context.getCodecRegistry()); + } + + if (value instanceof String json && BsonUtils.isJsonDocument(json)) { + return BsonUtils.parse(json, context); + } + + throw new IllegalStateException( + String.format("%s cannot be converted to org.bson.Document", ObjectUtils.nullSafeClassName(value))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java index eaee42d96e..69689908c9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -87,7 +87,7 @@ public static class BooleanOperatorFactory { */ public BooleanOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -99,7 +99,7 @@ public BooleanOperatorFactory(String fieldReference) { */ public BooleanOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -113,7 +113,7 @@ public BooleanOperatorFactory(AggregationExpression expression) { */ public And and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createAnd().andExpression(expression); } @@ -126,7 +126,7 @@ public And and(AggregationExpression expression) { */ public And and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAnd().andField(fieldReference); } @@ -143,7 +143,7 @@ private And createAnd() { */ public Or or(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createOr().orExpression(expression); } @@ -156,7 +156,7 @@ public Or or(AggregationExpression expression) { */ public Or or(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createOr().orField(fieldReference); } @@ -213,7 +213,7 @@ public static And and(Object... 
expressions) { */ public And andExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new And(append(expression)); } @@ -225,7 +225,7 @@ public And andExpression(AggregationExpression expression) { */ public And andField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new And(append(Fields.field(fieldReference))); } @@ -237,7 +237,7 @@ public And andField(String fieldReference) { */ public And andValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new And(append(value)); } } @@ -267,7 +267,7 @@ protected String getMongoMethod() { */ public static Or or(Object... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new Or(Arrays.asList(expressions)); } @@ -279,7 +279,7 @@ public static Or or(Object... 
expressions) { */ public Or orExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Or(append(expression)); } @@ -291,7 +291,7 @@ public Or orExpression(AggregationExpression expression) { */ public Or orField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Or(append(Fields.field(fieldReference))); } @@ -303,7 +303,7 @@ public Or orField(String fieldReference) { */ public Or orValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Or(append(value)); } } @@ -333,7 +333,7 @@ protected String getMongoMethod() { */ public static Not not(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Not(asFields(fieldReference)); } @@ -346,7 +346,7 @@ public static Not not(String fieldReference) { */ public static Not not(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Not(Collections.singletonList(expression)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java index 235c16befe..36492e2a81 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,8 +28,7 @@ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating * instances of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ * @see BucketOperationSupport * @author Mark Paluch * @author Christoph Strobl @@ -51,7 +50,7 @@ public BucketAutoOperation(Field groupByField, int buckets) { super(groupByField); - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -67,7 +66,7 @@ public BucketAutoOperation(AggregationExpression groupByExpression, int buckets) super(groupByExpression); - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -89,9 +88,6 @@ private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, St this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -108,10 +104,6 @@ public Document toDocument(AggregationOperationContext context) { return new Document(getOperator(), options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator() - */ @Override public String getOperator() { return "$bucketAuto"; @@ -125,7 +117,7 @@ public String getOperator() { */ public BucketAutoOperation 
withBuckets(int buckets) { - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); return new BucketAutoOperation(this, buckets, granularity); } @@ -140,38 +132,26 @@ public BucketAutoOperation withBuckets(int buckets) { */ public BucketAutoOperation withGranularity(Granularity granularity) { - Assert.notNull(granularity, "Granularity must not be null!"); + Assert.notNull(granularity, "Granularity must not be null"); return new BucketAutoOperation(this, buckets, granularity.getMongoRepresentation()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketAutoOperation newBucketOperation(Outputs outputs) { return new BucketAutoOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketAutoOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketAutoOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketAutoOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketAutoOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketAutoOperationOutputBuilder andOutput(String fieldName) { return new BucketAutoOperationOutputBuilder(Fields.field(fieldName), this); @@ -193,9 +173,6 @@ protected BucketAutoOperationOutputBuilder(Object value, BucketAutoOperation ope super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -224,9 +201,6 @@ protected ExpressionBucketAutoOperationBuilder(String expression, BucketAutoOper super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -248,8 +222,7 @@ public interface Granularity { /** * Supported MongoDB granularities. 
* - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity * @author Mark Paluch */ public enum Granularities implements Granularity { @@ -272,9 +245,6 @@ public enum Granularities implements Granularity { this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GranularitytoMongoGranularity() - */ @Override public String getMongoRepresentation() { return granularity; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java index 173fa4ece5..525789e628 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,8 +31,7 @@ * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of * this class directly. 
* - * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ * @see BucketOperationSupport * @author Mark Paluch * @since 1.10 @@ -81,13 +80,10 @@ private BucketOperation(BucketOperation bucketOperation, List boundaries super(bucketOperation); - this.boundaries = new ArrayList(boundaries); + this.boundaries = new ArrayList<>(boundaries); this.defaultBucket = defaultBucket; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -104,10 +100,6 @@ public Document toDocument(AggregationOperationContext context) { return new Document(getOperator(), options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator() - */ @Override public String getOperator() { return "$bucket"; @@ -121,7 +113,7 @@ public String getOperator() { */ public BucketOperation withDefaultBucket(Object literal) { - Assert.notNull(literal, "Default bucket literal must not be null!"); + Assert.notNull(literal, "Default bucket literal must not be null"); return new BucketOperation(this, boundaries, literal); } @@ -134,43 +126,31 @@ public BucketOperation withDefaultBucket(Object literal) { */ public BucketOperation withBoundaries(Object... 
boundaries) { - Assert.notNull(boundaries, "Boundaries must not be null!"); - Assert.noNullElements(boundaries, "Boundaries must not contain null values!"); + Assert.notNull(boundaries, "Boundaries must not be null"); + Assert.noNullElements(boundaries, "Boundaries must not contain null values"); - List newBoundaries = new ArrayList(this.boundaries.size() + boundaries.length); + List newBoundaries = new ArrayList<>(this.boundaries.size() + boundaries.length); newBoundaries.addAll(this.boundaries); newBoundaries.addAll(Arrays.asList(boundaries)); return new BucketOperation(this, newBoundaries, defaultBucket); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketOperation newBucketOperation(Outputs outputs) { return new BucketOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketOperationOutputBuilder andOutput(String fieldName) { return new BucketOperationOutputBuilder(Fields.field(fieldName), this); @@ -192,9 +172,6 @@ protected BucketOperationOutputBuilder(Object value, BucketOperation operation) super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); @@ -222,9 +199,6 @@ protected ExpressionBucketOperationBuilder(String expression, BucketOperation op super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java index 
6a768a6172..e19ad59a3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -51,7 +51,7 @@ public abstract class BucketOperationSupport operationSupport) */ protected BucketOperationSupport(BucketOperationSupport operationSupport, Outputs outputs) { - Assert.notNull(operationSupport, "BucketOperationSupport must not be null!"); - Assert.notNull(outputs, "Outputs must not be null!"); + Assert.notNull(operationSupport, "BucketOperationSupport must not be null"); + Assert.notNull(outputs, "Outputs must not be null"); this.groupByField = operationSupport.groupByField; this.groupByExpression = operationSupport.groupByExpression; @@ -141,9 +141,6 @@ public Document toDocument(AggregationOperationContext context) { }); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -159,9 +156,6 @@ public Document toDocument(AggregationOperationContext context) { return document; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return outputs.asExposedFields(); @@ -219,8 +213,8 @@ public abstract static class OutputBuilder, T exte */ protected OutputBuilder(Object value, T operation) { - Assert.notNull(value, "Value must not be null or empty!"); - Assert.notNull(operation, 
"ProjectionOperation must not be null!"); + Assert.notNull(value, "Value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); this.value = value; this.operation = operation; @@ -327,10 +321,10 @@ public B addToSet() { */ public B apply(String operation, Object... values) { - Assert.hasText(operation, "Operation must not be empty or null!"); - Assert.notNull(value, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be empty or null"); + Assert.notNull(value, "Values must not be null"); - List objects = new ArrayList(values.length + 1); + List objects = new ArrayList<>(values.length + 1); objects.add(value); objects.addAll(Arrays.asList(values)); return apply(new OperationOutput(operation, objects)); @@ -356,12 +350,12 @@ private B apply(Accumulators operation) { */ public T as(String alias) { - if (value instanceof OperationOutput) { - return this.operation.andOutput(((OperationOutput) this.value).withAlias(alias)); + if (value instanceof OperationOutput operationOutput) { + return this.operation.andOutput(operationOutput.withAlias(alias)); } if (value instanceof Field) { - throw new IllegalStateException("Cannot add a field as top-level output. Use accumulator expressions."); + throw new IllegalStateException("Cannot add a field as top-level output; Use accumulator expressions"); } return this.operation @@ -374,7 +368,7 @@ private enum Accumulators { SUM("$sum"), AVG("$avg"), FIRST("$first"), LAST("$last"), MAX("$max"), MIN("$min"), PUSH("$push"), ADDTOSET( "$addToSet"); - private String mongoOperator; + private final String mongoOperator; Accumulators(String mongoOperator) { this.mongoOperator = mongoOperator; @@ -394,7 +388,7 @@ protected static class Outputs implements AggregationExpression { protected static final Outputs EMPTY = new Outputs(); - private List outputs; + private final List outputs; /** * Creates a new, empty {@link Outputs}. 
@@ -443,7 +437,7 @@ protected ExposedFields asExposedFields() { */ protected Outputs and(Output output) { - Assert.notNull(output, "BucketOutput must not be null!"); + Assert.notNull(output, "BucketOutput must not be null"); return new Outputs(this.outputs, output); } @@ -454,9 +448,6 @@ protected boolean isEmpty() { return outputs.isEmpty(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -489,7 +480,7 @@ protected abstract static class Output implements AggregationExpression { */ protected Output(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } @@ -525,11 +516,11 @@ public OperationOutput(String operation, Collection values) { super(Fields.field(operation)); - Assert.hasText(operation, "Operation must not be null or empty!"); - Assert.notNull(values, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); this.operation = operation; - this.values = new ArrayList(values); + this.values = new ArrayList<>(values); } private OperationOutput(Field field, OperationOutput operationOutput) { @@ -540,10 +531,6 @@ private OperationOutput(Field field, OperationOutput operationOutput) { this.values = operationOutput.values; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -553,18 +540,18 @@ public Document toDocument(AggregationOperationContext context) { protected List 
getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values != null ? values.size() : 1); + List result = new ArrayList<>(values != null ? values.size() : 1); for (Object element : values) { - if (element instanceof Field) { - result.add(context.getReference((Field) element).toString()); - } else if (element instanceof Fields) { - for (Field field : (Fields) element) { + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { result.add(context.getReference(field).toString()); } - } else if (element instanceof AggregationExpression) { - result.add(((AggregationExpression) element).toDocument(context)); + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); } else { result.add(element); } @@ -629,16 +616,13 @@ public SpelExpressionOutput(String expression, Object[] parameters) { super(Fields.field(expression)); - Assert.hasText(expression, "Expression must not be null!"); - Assert.notNull(parameters, "Parameters must not be null!"); + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); this.expression = expression; this.params = parameters.clone(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return (Document) TRANSFORMER.transform(expression, context, params); @@ -665,9 +649,6 @@ protected AggregationExpressionOutput(Field field, AggregationExpression express this.expression = expression; } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return expression.toDocument(context); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java index 4e5c364db9..f27b7f16cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -60,7 +60,7 @@ public static class ComparisonOperatorFactory { */ public ComparisonOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -72,7 +72,7 @@ public ComparisonOperatorFactory(String fieldReference) { */ public ComparisonOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -362,7 +362,7 @@ protected String getMongoMethod() { */ public static Cmp valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(asFields(fieldReference)); } @@ -374,7 +374,7 @@ public static Cmp valueOf(String fieldReference) { */ public static Cmp valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(Collections.singletonList(expression)); } @@ -386,7 +386,7 @@ public static Cmp valueOf(AggregationExpression expression) { */ public Cmp compareTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(append(Fields.field(fieldReference))); } @@ -398,7 +398,7 @@ public Cmp compareTo(String fieldReference) { */ public Cmp compareTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(append(expression)); } @@ -410,7 +410,7 @@ public Cmp compareTo(AggregationExpression expression) { */ public Cmp compareToValue(Object 
value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Cmp(append(value, Expand.KEEP_SOURCE)); } } @@ -439,7 +439,7 @@ protected String getMongoMethod() { */ public static Eq valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(asFields(fieldReference)); } @@ -451,7 +451,7 @@ public static Eq valueOf(String fieldReference) { */ public static Eq valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(Collections.singletonList(expression)); } @@ -463,7 +463,7 @@ public static Eq valueOf(AggregationExpression expression) { */ public Eq equalTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(append(Fields.field(fieldReference))); } @@ -475,7 +475,7 @@ public Eq equalTo(String fieldReference) { */ public Eq equalTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(append(expression)); } @@ -487,7 +487,7 @@ public Eq equalTo(AggregationExpression expression) { */ public Eq equalToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Eq(append(value, Expand.KEEP_SOURCE)); } } @@ -516,7 +516,7 @@ protected String getMongoMethod() { */ public static Gt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(asFields(fieldReference)); } @@ -528,7 +528,7 @@ public static Gt valueOf(String 
fieldReference) { */ public static Gt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(Collections.singletonList(expression)); } @@ -540,7 +540,7 @@ public static Gt valueOf(AggregationExpression expression) { */ public Gt greaterThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(append(Fields.field(fieldReference))); } @@ -552,7 +552,7 @@ public Gt greaterThan(String fieldReference) { */ public Gt greaterThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(append(expression)); } @@ -564,7 +564,7 @@ public Gt greaterThan(AggregationExpression expression) { */ public Gt greaterThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gt(append(value)); } } @@ -593,7 +593,7 @@ protected String getMongoMethod() { */ public static Lt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lt(asFields(fieldReference)); } @@ -605,7 +605,7 @@ public static Lt valueOf(String fieldReference) { */ public static Lt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(Collections.singletonList(expression)); } @@ -617,7 +617,7 @@ public static Lt valueOf(AggregationExpression expression) { */ public Lt lessThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be 
null"); return new Lt(append(Fields.field(fieldReference))); } @@ -629,7 +629,7 @@ public Lt lessThan(String fieldReference) { */ public Lt lessThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(append(expression)); } @@ -641,7 +641,7 @@ public Lt lessThan(AggregationExpression expression) { */ public Lt lessThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lt(append(value)); } } @@ -670,7 +670,7 @@ protected String getMongoMethod() { */ public static Gte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(asFields(fieldReference)); } @@ -682,7 +682,7 @@ public static Gte valueOf(String fieldReference) { */ public static Gte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(Collections.singletonList(expression)); } @@ -694,7 +694,7 @@ public static Gte valueOf(AggregationExpression expression) { */ public Gte greaterThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(append(Fields.field(fieldReference))); } @@ -706,7 +706,7 @@ public Gte greaterThanEqualTo(String fieldReference) { */ public Gte greaterThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(append(expression)); } @@ -718,7 +718,7 @@ public Gte greaterThanEqualTo(AggregationExpression expression) { */ public Gte greaterThanEqualToValue(Object value) { - 
Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gte(append(value)); } } @@ -747,7 +747,7 @@ protected String getMongoMethod() { */ public static Lte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(asFields(fieldReference)); } @@ -759,7 +759,7 @@ public static Lte valueOf(String fieldReference) { */ public static Lte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(Collections.singletonList(expression)); } @@ -771,7 +771,7 @@ public static Lte valueOf(AggregationExpression expression) { */ public Lte lessThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(append(Fields.field(fieldReference))); } @@ -783,7 +783,7 @@ public Lte lessThanEqualTo(String fieldReference) { */ public Lte lessThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(append(expression)); } @@ -795,7 +795,7 @@ public Lte lessThanEqualTo(AggregationExpression expression) { */ public Lte lessThanEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lte(append(value)); } } @@ -824,7 +824,7 @@ protected String getMongoMethod() { */ public static Ne valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(asFields(fieldReference)); } @@ -836,7 +836,7 @@ public static Ne valueOf(String 
fieldReference) { */ public static Ne valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(Collections.singletonList(expression)); } @@ -848,7 +848,7 @@ public static Ne valueOf(AggregationExpression expression) { */ public Ne notEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(append(Fields.field(fieldReference))); } @@ -860,7 +860,7 @@ public Ne notEqualTo(String fieldReference) { */ public Ne notEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(append(expression)); } @@ -872,7 +872,7 @@ public Ne notEqualTo(AggregationExpression expression) { */ public Ne notEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ne(append(value, Expand.KEEP_SOURCE)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 1d3890ce89..323a11895b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; @@ -79,7 +80,7 @@ public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinit */ public static IfNull.ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return IfNull.ifNull(fieldReference); } @@ -93,7 +94,7 @@ public static IfNull.ThenBuilder ifNull(String fieldReference) { */ public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return IfNull.ifNull(expression); } @@ -136,7 +137,7 @@ public static class ConditionalOperatorFactory { */ public ConditionalOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; @@ -150,7 +151,7 @@ public ConditionalOperatorFactory(String fieldReference) { */ public ConditionalOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; @@ -164,7 +165,7 @@ public ConditionalOperatorFactory(AggregationExpression expression) { */ public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null!"); + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); this.fieldReference = null; this.expression = null; @@ -180,7 +181,7 @@ public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { */ public OtherwiseBuilder then(Object 
value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createThenBuilder().then(value); } @@ -193,7 +194,7 @@ public OtherwiseBuilder then(Object value) { */ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createThenBuilder().then(expression); } @@ -206,7 +207,7 @@ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { */ public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createThenBuilder().thenValueOf(fieldReference); } @@ -235,7 +236,7 @@ private boolean usesCriteriaDefinition() { * * @author Mark Paluch * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ */ public static class IfNull implements AggregationExpression { @@ -251,12 +252,13 @@ private IfNull(Object condition, Object value) { /** * Creates new {@link IfNull}. * - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. * @return never {@literal null}. */ public static ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IfNullOperatorBuilder().ifNull(fieldReference); } @@ -264,43 +266,49 @@ public static ThenBuilder ifNull(String fieldReference) { * Creates new {@link IfNull}. 
* * @param expression the expression to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return never {@literal null}. */ public static ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IfNullOperatorBuilder().ifNull(expression); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - List list = new ArrayList(); + List list = new ArrayList<>(); - if (condition instanceof Field) { - list.add(context.getReference((Field) condition).toString()); - } else if (condition instanceof AggregationExpression) { - list.add(((AggregationExpression) condition).toDocument(context)); + if (condition instanceof Collection collection) { + for (Object val : collection) { + list.add(mapCondition(val, context)); + } } else { - list.add(condition); + list.add(mapCondition(condition, context)); } list.add(resolve(value, context)); - return new Document("$ifNull", list); } + private Object mapCondition(Object condition, AggregationOperationContext context) { + + if (condition instanceof Field field) { + return context.getReference(field).toString(); + } else if (condition instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else { + return condition; + } + } + private Object resolve(Object value, AggregationOperationContext context) { - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } else if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof Field field) { + return 
context.getReference(field).toString(); + } else if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } else if (value instanceof Document) { return value; } @@ -315,28 +323,48 @@ public interface IfNullBuilder { /** * @param fieldReference the field to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return the {@link ThenBuilder} */ ThenBuilder ifNull(String fieldReference); /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} - * or empty. - * @return the {@link ThenBuilder} + * or empty. + * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); } + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + /** * @author Mark Paluch */ - public interface ThenBuilder { + public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a - * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB - * representation but must not be {@literal null}. + * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB + * representation but must not be {@literal null}. * @return new instance of {@link IfNull}. 
*/ IfNull then(Object value); @@ -361,9 +389,10 @@ public interface ThenBuilder { */ static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder { - private @Nullable Object condition; + private @Nullable List conditions; private IfNullOperatorBuilder() { + conditions = new ArrayList<>(); } /** @@ -375,50 +404,45 @@ public static IfNullOperatorBuilder newBuilder() { return new IfNullOperatorBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(java.lang.String) - */ public ThenBuilder ifNull(String fieldReference) { - Assert.hasText(fieldReference, "FieldReference name must not be null or empty!"); - this.condition = Fields.field(fieldReference); + Assert.hasText(fieldReference, "FieldReference name must not be null or empty"); + this.conditions.add(Fields.field(fieldReference)); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression name must not be null or empty!"); - this.condition = expression; + Assert.notNull(expression, "AggregationExpression name must not be null or empty"); + this.conditions.add(expression); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object) - */ + @Override + public ThenBuilder orIfNull(String fieldReference) { + return ifNull(fieldReference); + } + + @Override + public ThenBuilder orIfNull(AggregationExpression expression) { + return ifNull(expression); + } + public IfNull then(Object value) { - return new IfNull(condition, value); + return new IfNull(conditions, value); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(java.lang.String) - */ public IfNull thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IfNull(condition, Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new IfNull(conditions, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ public IfNull thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new IfNull(condition, expression); + Assert.notNull(expression, "Expression must not be null"); + return new IfNull(conditions, expression); } } } @@ -446,7 +470,7 @@ protected String getMongoMethod() { */ public static Switch switchCases(CaseOperator... conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); + Assert.notNull(conditions, "Conditions must not be null"); return switchCases(Arrays.asList(conditions)); } @@ -457,8 +481,8 @@ public static Switch switchCases(CaseOperator... 
conditions) { */ public static Switch switchCases(List conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); - return new Switch(Collections.singletonMap("branches", new ArrayList(conditions))); + Assert.notNull(conditions, "Conditions must not be null"); + return new Switch(Collections.singletonMap("branches", new ArrayList<>(conditions))); } /** @@ -487,31 +511,28 @@ private CaseOperator(AggregationExpression when, Object then) { public static ThenBuilder when(final AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); return new ThenBuilder() { @Override public CaseOperator then(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new CaseOperator(condition, value); } }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { Document dbo = new Document("case", when.toDocument(context)); - if (then instanceof AggregationExpression) { - dbo.put("then", ((AggregationExpression) then).toDocument(context)); - } else if (then instanceof Field) { - dbo.put("then", context.getReference((Field) then).toString()); + if (then instanceof AggregationExpression aggregationExpression) { + dbo.put("then", aggregationExpression.toDocument(context)); + } else if (then instanceof Field field) { + dbo.put("then", context.getReference(field).toString()); } else { dbo.put("then", then); } @@ -545,7 +566,7 @@ public interface ThenBuilder { * @author Mark Paluch * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ + * 
"https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ */ public static class Cond implements AggregationExpression { @@ -577,9 +598,9 @@ private Cond(CriteriaDefinition condition, Object thenValue, Object otherwiseVal private Cond(Object condition, Object thenValue, Object otherwiseValue) { - Assert.notNull(condition, "Condition must not be null!"); - Assert.notNull(thenValue, "Then value must not be null!"); - Assert.notNull(otherwiseValue, "Otherwise value must not be null!"); + Assert.notNull(condition, "Condition must not be null"); + Assert.notNull(thenValue, "Then value must not be null"); + Assert.notNull(otherwiseValue, "Otherwise value must not be null"); assertNotBuilder(condition, "Condition"); assertNotBuilder(thenValue, "Then value"); @@ -590,10 +611,6 @@ private Cond(Object condition, Object thenValue, Object otherwiseValue) { this.otherwiseValue = otherwiseValue; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -612,8 +629,8 @@ private Object resolveValue(AggregationOperationContext context, Object value) { return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } return context.getMappedObject(new Document("$set", value)).get("$set"); @@ -625,27 +642,24 @@ private Object resolveCriteria(AggregationOperationContext context, Object value return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression 
aggregationExpression) { + return aggregationExpression.toDocument(context); } - if (value instanceof CriteriaDefinition) { - - Document mappedObject = context.getMappedObject(((CriteriaDefinition) value).getCriteriaObject()); - List clauses = new ArrayList(); - - clauses.addAll(getClauses(context, mappedObject)); + if (value instanceof CriteriaDefinition criteriaDefinition) { + Document mappedObject = context.getMappedObject(criteriaDefinition.getCriteriaObject()); + List clauses = getClauses(context, mappedObject); return clauses.size() == 1 ? clauses.get(0) : clauses; } throw new InvalidDataAccessApiUsageException( - String.format("Invalid value in condition. Supported: Document, Field references, Criteria, got: %s", value)); + String.format("Invalid value in condition; Supported: Document, Field references, Criteria, got: %s", value)); } private List getClauses(AggregationOperationContext context, Document mappedObject) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); for (String key : mappedObject.keySet()) { @@ -658,21 +672,20 @@ private List getClauses(AggregationOperationContext context, Document ma private List getClauses(AggregationOperationContext context, String key, Object predicate) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); - if (predicate instanceof List) { + if (predicate instanceof List predicates) { - List args = new ArrayList(); - for (Object clause : (List) predicate) { - if (clause instanceof Document) { - args.addAll(getClauses(context, (Document) clause)); + List args = new ArrayList<>(predicates.size()); + + for (Object clause : predicates) { + if (clause instanceof Document document) { + args.addAll(getClauses(context, document)); } } clauses.add(new Document(key, args)); - } else if (predicate instanceof Document) { - - Document nested = (Document) predicate; + } else if (predicate instanceof Document nested) { for (String s : nested.keySet()) { @@ -680,14 +693,14 @@ private List 
getClauses(AggregationOperationContext context, String key, continue; } - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(nested.get(s)); clauses.add(new Document(s, args)); } } else if (!isKeyword(key)) { - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(predicate); clauses.add(new Document("$eq", args)); @@ -708,8 +721,8 @@ private boolean isKeyword(String candidate) { private Object resolve(AggregationOperationContext context, Object value) { - if (value instanceof Document) { - return context.getMappedObject((Document) value); + if (value instanceof Document document) { + return context.getMappedObject(document); } return context.getReference((Field) value).toString(); @@ -806,8 +819,8 @@ public interface ThenBuilder { /** * @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. * @return the {@link OtherwiseBuilder} */ OtherwiseBuilder then(Object value); @@ -832,8 +845,8 @@ public interface OtherwiseBuilder { /** * @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. 
* @return the {@link Cond} */ Cond otherwise(Object value); @@ -861,8 +874,7 @@ static class ConditionalExpressionBuilder implements WhenBuilder, ThenBuilder, O private @Nullable Object condition; private @Nullable Object thenValue; - private ConditionalExpressionBuilder() { - } + private ConditionalExpressionBuilder() {} /** * Creates a new builder for {@link Cond}. @@ -873,111 +885,81 @@ public static ConditionalExpressionBuilder newBuilder() { return new ConditionalExpressionBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.bson.Document) - */ @Override public ConditionalExpressionBuilder when(Document booleanExpression) { - Assert.notNull(booleanExpression, "'Boolean expression' must not be null!"); + Assert.notNull(booleanExpression, "'Boolean expression' must not be null"); this.condition = booleanExpression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.query.CriteriaDefinition) - */ @Override public ThenBuilder when(CriteriaDefinition criteria) { - Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(criteria, "Criteria must not be null"); this.condition = criteria; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public ThenBuilder when(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression field must not be null!"); + Assert.notNull(expression, "AggregationExpression field must not be null"); this.condition = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(java.lang.String) - */ @Override public ThenBuilder when(String 
booleanField) { - Assert.hasText(booleanField, "Boolean field name must not be null or empty!"); + Assert.hasText(booleanField, "Boolean field name must not be null or empty"); this.condition = Fields.field(booleanField); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#then(java.lang.Object) - */ @Override public OtherwiseBuilder then(Object thenValue) { - Assert.notNull(thenValue, "Then-value must not be null!"); + Assert.notNull(thenValue, "Then-value must not be null"); this.thenValue = thenValue; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(java.lang.String) - */ @Override public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.thenValue = Fields.field(fieldReference); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); this.thenValue = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwise(java.lang.Object) - */ @Override public Cond otherwise(Object otherwiseValue) { - Assert.notNull(otherwiseValue, "Value must not be null!"); + Assert.notNull(otherwiseValue, "Value must not be null"); return new Cond(condition, thenValue, otherwiseValue); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(java.lang.String) - */ @Override public Cond otherwiseValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cond(condition, thenValue, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public Cond otherwiseValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); return new Cond(condition, thenValue, expression); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index 315a463e1f..aa085b2a29 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -64,7 +64,7 @@ public static class ConvertOperatorFactory { */ public ConvertOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; @@ -77,7 +77,7 @@ public ConvertOperatorFactory(String fieldReference) { */ public ConvertOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; @@ -231,6 +231,17 @@ public ToString convertToString() { return ToString.toString(valueObject()); } + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + private Convert createConvert() { return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); } @@ -317,9 +328,9 @@ public Convert to(String stringTypeIdentifier) { *
    1
    *
    double
    *
    2
    - *
    string + *
    string
    *
    7
    - *
    objectId + *
    objectId
    *
    8
    *
    bool
    *
    9
    @@ -692,4 +703,52 @@ protected String getMongoMethod() { return "$toString"; } } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. 
+ */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java index e2b65aa7ff..6a6108f832 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,8 +24,7 @@ * We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class * directly. 
* - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ * @author Mark Paluch * @since 1.10 */ @@ -40,13 +39,10 @@ public class CountOperation implements FieldsExposingAggregationOperation { */ public CountOperation(String fieldName) { - Assert.hasText(fieldName, "Field name must not be null or empty!"); + Assert.hasText(fieldName, "Field name must not be null or empty"); this.fieldName = fieldName; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(getOperator(), fieldName); @@ -57,9 +53,6 @@ public String getOperator() { return "$count"; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(new ExposedField(fieldName, true)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java index fbc49ef080..26a85bf2c3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -60,7 +60,7 @@ protected String getMongoMethod() { */ public static Type typeOf(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new Type(Fields.field(field)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 15f10f7d6c..ff6ed7e983 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,9 +15,16 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -41,10 +48,23 @@ public class DateOperators { */ public static DateOperatorFactory dateOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new DateOperatorFactory(fieldReference); } + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. 
+ * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + /** * Take the date resulting from the given {@link AggregationExpression}. * @@ -53,13 +73,25 @@ public static DateOperatorFactory dateOf(String fieldReference) { */ public static DateOperatorFactory dateOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new DateOperatorFactory(expression); } /** - * Take the given value as date. - *

    + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + + /** + * Take the given value as date.
    * This can be one of: *