diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 6c5d559a8a..e8f632af23 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -6,7 +6,6 @@ Make sure that:
-->
- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
-- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml
new file mode 100644
index 0000000000..606226523e
--- /dev/null
+++ b/.github/workflows/project.yml
@@ -0,0 +1,47 @@
+# GitHub Actions to automate GitHub issues for Spring Data Project Management
+
+name: Spring Data GitHub Issues
+
+on:
+ issues:
+ types: [opened, edited, reopened]
+ issue_comment:
+ types: [created]
+ pull_request_target:
+ types: [opened, edited, reopened]
+
+jobs:
+ Inbox:
+ runs-on: ubuntu-latest
+ if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null
+ steps:
+ - name: Create or Update Issue Card
+ uses: peter-evans/create-or-update-project-card@v1.1.2
+ with:
+ project-name: 'Spring Data'
+ column-name: 'Inbox'
+ project-location: 'spring-projects'
+ token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
+ Pull-Request:
+ runs-on: ubuntu-latest
+ if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null
+ steps:
+ - name: Create or Update Pull Request Card
+ uses: peter-evans/create-or-update-project-card@v1.1.2
+ with:
+ project-name: 'Spring Data'
+ column-name: 'Review pending'
+ project-location: 'spring-projects'
+ issue-number: ${{ github.event.pull_request.number }}
+ token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
+ Feedback-Provided:
+ runs-on: ubuntu-latest
+ if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback')
+ steps:
+ - name: Update Project Card
+ uses: peter-evans/create-or-update-project-card@v1.1.2
+ with:
+ project-name: 'Spring Data'
+ column-name: 'Feedback provided'
+ project-location: 'spring-projects'
+ token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index 00d32aab1d..dd2d8ef1ee 100755
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -1 +1,2 @@
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
\ No newline at end of file
+#Fri Jun 03 09:42:19 CEST 2022
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.5/apache-maven-3.8.5-bin.zip
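With the updated `distributionUrl`, the wrapper downloads Apache Maven 3.8.5 the first time it runs. A quick way to confirm the new distribution is picked up (assuming a local JDK is on the path) is:

[source,bash]
----
$ ./mvnw -version
----

The first invocation fetches the distribution (typically into `~/.m2/wrapper/dists`) before printing the Maven and JDK versions.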
diff --git a/CI.adoc b/CI.adoc
index c6cb467f2b..4e95939a34 100644
--- a/CI.adoc
+++ b/CI.adoc
@@ -1,6 +1,6 @@
= Continuous Integration
-image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc
deleted file mode 100644
index 33ae7bc9f1..0000000000
--- a/CODE_OF_CONDUCT.adoc
+++ /dev/null
@@ -1,27 +0,0 @@
-= Contributor Code of Conduct
-
-As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
-
-We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information, such as physical or electronic addresses,
- without explicit permission
-* Other unethical or unprofessional conduct
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
-
-By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.
-
-This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
-All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
-Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.
-
-This Code of Conduct is adapted from the https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
\ No newline at end of file
diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc
index f007591467..740e8bd0bb 100644
--- a/CONTRIBUTING.adoc
+++ b/CONTRIBUTING.adoc
@@ -1,3 +1,3 @@
= Spring Data contribution guidelines
-You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
+You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
diff --git a/Jenkinsfile b/Jenkinsfile
index 62ea203246..565b61da56 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,9 +1,15 @@
+def p = [:]
+node {
+ checkout scm
+ p = readProperties interpolate: true, file: 'ci/pipeline.properties'
+}
+
pipeline {
agent none
triggers {
pollSCM 'H/10 * * * *'
- upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
+ upstream(upstreamProjects: "spring-data-commons/2.6.x", threshold: hudson.model.Result.SUCCESS)
}
options {
@@ -14,49 +20,77 @@ pipeline {
stages {
stage("Docker images") {
parallel {
- stage('Publish JDK 8 + MongoDB 4.0') {
+ stage('Publish JDK (main) + MongoDB 4.0') {
when {
- changeset "ci/openjdk8-mongodb-4.0/**"
+ anyOf {
+ changeset "ci/openjdk8-mongodb-4.0/**"
+ changeset "ci/pipeline.properties"
+ }
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
- def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+ def image = docker.build("springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.0.version']} ci/openjdk8-mongodb-4.0/")
+ docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
- stage('Publish JDK 8 + MongoDB 4.2') {
+ stage('Publish JDK (main) + MongoDB 4.4') {
when {
- changeset "ci/openjdk8-mongodb-4.2/**"
+ anyOf {
+ changeset "ci/openjdk8-mongodb-4.4/**"
+ changeset "ci/pipeline.properties"
+ }
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
- def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+ def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk8-mongodb-4.4/")
+ docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
- stage('Publish JDK 14 + MongoDB 4.2') {
+ stage('Publish JDK (main) + MongoDB 5.0') {
when {
- changeset "ci/openjdk14-mongodb-4.2/**"
+ anyOf {
+ changeset "ci/openjdk8-mongodb-5.0/**"
+ changeset "ci/pipeline.properties"
+ }
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
- def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+ def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk8-mongodb-5.0/")
+ docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+ image.push()
+ }
+ }
+ }
+ }
+ stage('Publish JDK (LTS) + MongoDB 4.4') {
+ when {
+ anyOf {
+ changeset "ci/openjdk17-mongodb-4.4/**"
+ changeset "ci/pipeline.properties"
+ }
+ }
+ agent { label 'data' }
+ options { timeout(time: 30, unit: 'MINUTES') }
+
+ steps {
+ script {
+ def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}", "--build-arg BASE=${p['docker.java.lts.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk17-mongodb-4.4/")
+ docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
@@ -65,97 +99,107 @@ pipeline {
}
}
- stage("test: baseline (jdk8)") {
+ stage("test: baseline (main)") {
when {
+ beforeAgent(true)
anyOf {
- branch 'master'
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
agent {
- docker {
- image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
- }
+ label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
+ environment {
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ }
steps {
- sh 'rm -rf ?'
- sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
- sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
- sh 'sleep 10'
- sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
- sh 'sleep 15'
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ script {
+ docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+ sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+ sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+ sh 'sleep 10'
+ sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+ sh 'sleep 15'
+ sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ }
+ }
}
}
stage("Test other configurations") {
when {
- anyOf {
- branch 'master'
+ beforeAgent(true)
+ allOf {
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
- stage("test: mongodb 4.0 (jdk8)") {
+ stage("test: mongodb 4.4 (main)") {
agent {
- docker {
- image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
- }
+ label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
+ environment {
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ }
steps {
- sh 'rm -rf ?'
- sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
- sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
- sh 'sleep 10'
- sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
- sh 'sleep 15'
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ script {
+ docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+ sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+ sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+ sh 'sleep 10'
+ sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+ sh 'sleep 15'
+ sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ }
+ }
}
}
- stage("test: mongodb 4.2 (jdk8)") {
+ stage("test: mongodb 5.0 (main)") {
agent {
- docker {
- image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
- }
+ label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
+ environment {
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ }
steps {
- sh 'rm -rf ?'
- sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
- sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
- sh 'sleep 10'
- sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
- sh 'sleep 15'
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ script {
+ docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+ sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+ sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+ sh 'sleep 10'
+ sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+ sh 'sleep 15'
+ sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ }
+ }
}
}
- stage("test: baseline (jdk14)") {
+ stage("test: baseline (LTS)") {
agent {
- docker {
- image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
- }
+ label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
+ environment {
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ }
steps {
- sh 'rm -rf ?'
- sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
- sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
- sh 'sleep 10'
- sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
- sh 'sleep 15'
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ script {
+ docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}").inside(p['docker.java.inside.basic']) {
+ sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+ sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+ sh 'sleep 10'
+ sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+ sh 'sleep 15'
+ sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+ }
+ }
}
}
}
@@ -163,62 +207,35 @@ pipeline {
stage('Release to artifactory') {
when {
+ beforeAgent(true)
anyOf {
- branch 'master'
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
agent {
- docker {
- image 'adoptopenjdk/openjdk8:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
- }
+ label 'data'
}
options { timeout(time: 20, unit: 'MINUTES') }
environment {
- ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
- sh 'rm -rf ?'
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
- '-Dartifactory.server=https://repo.spring.io ' +
- "-Dartifactory.username=${ARTIFACTORY_USR} " +
- "-Dartifactory.password=${ARTIFACTORY_PSW} " +
- "-Dartifactory.staging-repository=libs-snapshot-local " +
- "-Dartifactory.build-name=spring-data-mongodb " +
- "-Dartifactory.build-number=${BUILD_NUMBER} " +
- '-Dmaven.test.skip=true clean deploy -U -B'
- }
- }
-
- stage('Publish documentation') {
- when {
- branch 'master'
- }
- agent {
- docker {
- image 'adoptopenjdk/openjdk8:latest'
- label 'data'
- args '-v $HOME:/tmp/jenkins-home'
+ script {
+ docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) {
+ sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
+ '-Dartifactory.server=https://repo.spring.io ' +
+ "-Dartifactory.username=${ARTIFACTORY_USR} " +
+ "-Dartifactory.password=${ARTIFACTORY_PSW} " +
+ "-Dartifactory.staging-repository=libs-snapshot-local " +
+ "-Dartifactory.build-name=spring-data-mongodb " +
+ "-Dartifactory.build-number=${BUILD_NUMBER} " +
+ '-Dmaven.test.skip=true clean deploy -U -B'
+ }
}
}
- options { timeout(time: 20, unit: 'MINUTES') }
-
- environment {
- ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
- }
-
- steps {
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
- '-Dartifactory.server=https://repo.spring.io ' +
- "-Dartifactory.username=${ARTIFACTORY_USR} " +
- "-Dartifactory.password=${ARTIFACTORY_PSW} " +
- "-Dartifactory.distribution-repository=temp-private-local " +
- '-Dmaven.test.skip=true clean deploy -U -B'
- }
}
}
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000000..ff77379631
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.adoc b/README.adoc
index a6b69f9747..f526d0d4cc 100644
--- a/README.adoc
+++ b/README.adoc
@@ -1,17 +1,19 @@
-image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
+image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start]
-= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
+= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
-The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
+The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.
+[[code-of-conduct]]
== Code of Conduct
-This project is governed by the link:CODE_OF_CONDUCT.adoc[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
+This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
+[[getting-started]]
== Getting Started
Here is a quick teaser of an application using Spring Data Repositories in Java:
@@ -59,6 +61,7 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
}
----
+[[maven-configuration]]
=== Maven configuration
Add the Maven dependency:
@@ -68,24 +71,25 @@ Add the Maven dependency:
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
-  <version>${version}.RELEASE</version>
+  <version>${version}</version>
</dependency>
----
-If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.
+If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
+and declare the appropriate dependency version.
[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
-  <version>${version}.BUILD-SNAPSHOT</version>
+  <version>${version}-SNAPSHOT</version>
</dependency>

<repository>
-  <id>spring-libs-snapshot</id>
+  <id>spring-snapshot</id>
+  <name>Spring Snapshot Repository</name>
-  <url>https://repo.spring.io/libs-snapshot</url>
+  <url>https://repo.spring.io/snapshot</url>
</repository>
----
@@ -98,7 +102,7 @@ Some of the changes affect the initial setup configuration as well as compile/ru
.Changed XML Namespace Elements and Attributes:
|===
-Element / Attribute | 2.x | 3.x
+| Element / Attribute | 2.x | 3.x
| ``
| Used to create a `com.mongodb.MongoClient`
@@ -116,7 +120,7 @@ Use `` instead
.Removed XML Namespace Elements and Attributes:
|===
-Element / Attribute | Replacement in 3.x | Comment
+| Element / Attribute | Replacement in 3.x | Comment
| ``
| ``
@@ -133,7 +137,7 @@ Element / Attribute | Replacement in 3.x | Comment
.New XML Namespace Elements and Attributes:
|===
-Element | Comment
+| Element | Comment
| ``
| Replacement for ``
@@ -153,7 +157,7 @@ Element | Comment
.Java API changes
|===
-Type | Comment
+| Type | Comment
| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
@@ -174,7 +178,7 @@ Uses `MongoClientSettings` instead of `MongoClientOptions`.
.Removed Java API:
|===
-2.x | Replacement in 3.x | Comment
+| 2.x | Replacement in 3.x | Comment
| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
@@ -226,6 +230,7 @@ static class Config extends AbstractMongoClientConfiguration {
----
====
+[[getting-help]]
== Getting Help
Having trouble with Spring Data? We’d love to help!
@@ -237,23 +242,98 @@ If you are just starting out with Spring, try one of the https://spring.io/guide
* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`].
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
-* Report bugs with Spring Data MongoDB at https://jira.spring.io/browse/DATAMONGO[jira.spring.io/browse/DATAMONGO].
+* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].
+[[reporting-issues]]
== Reporting Issues
-Spring Data uses JIRA as issue tracking system to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below:
+Spring Data uses GitHub as its issue tracking system to record bugs and feature requests.
+If you want to raise an issue, please follow the recommendations below:
-* Before you log a bug, please search the
-https://jira.spring.io/browse/DATAMONGO[issue tracker] to see if someone has already reported the problem.
-* If the issue doesn’t already exist, https://jira.spring.io/browse/DATAMONGO[create a new issue].
-* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using and JVM version.
-* If you need to paste code, or include a stack trace use JIRA `{code}…{code}` escapes before and after your text.
-* If possible try to create a test-case or project that replicates the issue. Attach a link to your code or a compressed file containing your code.
+* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem.
+* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue].
+* Please provide as much information as possible with the issue report; we would like to know the version of Spring Data that you are using, the JVM version, the stack trace, etc.
+* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.
+[[guides]]
+== Guides
+
+The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
+
+* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
+* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
+
+[[examples]]
+== Examples
+
+* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
+
+[[building-from-source]]
== Building from Source
-You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
-You also need JDK 1.8.
+You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
+and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.
+
+NOTE: Configuration for Gradle is similar to Maven.
+
+The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
+to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
+to build a reactive one.
+
+However, if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]
+and minimally JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
+
+In order to build Spring Data MongoDB, first you will need to https://www.mongodb.com/try/download/community[download]
+and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
+
+Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable
+pointing to your MongoDB installation directory (e.g. `MONGODB_HOME`).
+
+To run the full test suite a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set] is required.
+
+To run the MongoDB server enter the following command from a command-line:
+
+[source,bash]
+----
+$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
+...
+"msg":"Successfully connected to host"
+----
+
+Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".
+
+Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
+the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).
+
+You need to initialize the MongoDB replica set only once, the first time the MongoDB server is started.
+To initialize the replica set, start a mongo client:
+
+[source,bash]
+----
+$ $MONGODB_HOME/bin/mongo
+MongoDB server version: 5.0.0
+...
+----
+
+Then enter the following command:
+
+[source,bash]
+----
+mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
+----
+
+Finally, on UNIX-based systems (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
+In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):
+
+[source,bash]
+----
+$ ulimit -n 32768
+----
+
+You can use `ulimit -a` again to verify the `ulimit` on "_open files_" was set appropriately.
+
+Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
[source,bash]
----
@@ -262,7 +342,8 @@ You also need JDK 1.8.
If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].
-_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
+_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
+the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
=== Building reference documentation
@@ -275,17 +356,7 @@ Building the documentation builds also the project without running tests.
The generated documentation is available from `target/site/reference/html/index.html`.
-== Guides
-
-The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
-
-* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
-* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
-
-== Examples
-
-* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
-
+[[license]]
== License
Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
diff --git a/ci/openjdk11-mongodb-4.2/Dockerfile b/ci/openjdk11-mongodb-4.2/Dockerfile
deleted file mode 100644
index 0d92eba78d..0000000000
--- a/ci/openjdk11-mongodb-4.2/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM adoptopenjdk/openjdk11:latest
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
- apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
- apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
- echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
- echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
- apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
- apt-get clean; \
- rm -rf /var/lib/apt/lists/*;
diff --git a/ci/openjdk11-mongodb-4.4/Dockerfile b/ci/openjdk11-mongodb-4.4/Dockerfile
new file mode 100644
index 0000000000..abacb005e5
--- /dev/null
+++ b/ci/openjdk11-mongodb-4.4/Dockerfile
@@ -0,0 +1,21 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+ sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
+ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
+ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
+ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
+ echo ${TZ} > /etc/timezone;
+
+RUN apt-get update ; \
+ apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
+ apt-get clean; \
+ rm -rf /var/lib/apt/lists/*;
diff --git a/ci/openjdk14-mongodb-4.2/Dockerfile b/ci/openjdk14-mongodb-4.2/Dockerfile
deleted file mode 100644
index 5f7d26c929..0000000000
--- a/ci/openjdk14-mongodb-4.2/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM adoptopenjdk/openjdk14:latest
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
- apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
- apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
- echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
- echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
- apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
- apt-get clean; \
- rm -rf /var/lib/apt/lists/*;
diff --git a/ci/openjdk17-mongodb-4.4/Dockerfile b/ci/openjdk17-mongodb-4.4/Dockerfile
new file mode 100644
index 0000000000..abacb005e5
--- /dev/null
+++ b/ci/openjdk17-mongodb-4.4/Dockerfile
@@ -0,0 +1,21 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+ sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
+ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
+ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
+ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
+ echo ${TZ} > /etc/timezone;
+
+RUN apt-get update ; \
+ apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
+ apt-get clean; \
+ rm -rf /var/lib/apt/lists/*;
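For a local smoke test of this image, the Jenkinsfile's "Publish JDK (LTS) + MongoDB 4.4" stage is the reference: it passes the base image and the MongoDB version from `ci/pipeline.properties` as build arguments. A rough stand-alone equivalent using the values defined in that file (the registry prefix and tag are whatever the pipeline resolves at the time) would be:

[source,bash]
----
$ docker build \
    --build-arg BASE=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:17.0.2_8-jdk \
    --build-arg MONGODB=4.4.12 \
    -t springci/spring-data-with-mongodb-4.4:17.0.2_8-jdk \
    ci/openjdk17-mongodb-4.4/
----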
diff --git a/ci/openjdk8-mongodb-4.0/Dockerfile b/ci/openjdk8-mongodb-4.0/Dockerfile
index 050a1797f5..99586b7961 100644
--- a/ci/openjdk8-mongodb-4.0/Dockerfile
+++ b/ci/openjdk8-mongodb-4.0/Dockerfile
@@ -1,15 +1,21 @@
-FROM adoptopenjdk/openjdk8:latest
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN set -eux; \
+ sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
echo ${TZ} > /etc/timezone;
RUN apt-get update ; \
- apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
+ apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
diff --git a/ci/openjdk8-mongodb-4.2/Dockerfile b/ci/openjdk8-mongodb-4.2/Dockerfile
deleted file mode 100644
index 846a10423b..0000000000
--- a/ci/openjdk8-mongodb-4.2/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM adoptopenjdk/openjdk8:latest
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
- apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
- apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
- echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
- echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
- apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
- apt-get clean; \
- rm -rf /var/lib/apt/lists/*;
diff --git a/ci/openjdk8-mongodb-4.4/Dockerfile b/ci/openjdk8-mongodb-4.4/Dockerfile
new file mode 100644
index 0000000000..87e212dbf6
--- /dev/null
+++ b/ci/openjdk8-mongodb-4.4/Dockerfile
@@ -0,0 +1,23 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+ sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
+ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
+ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
+ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
+ echo ${TZ} > /etc/timezone;
+
+RUN apt-get update ; \
+ ln -T /bin/true /usr/bin/systemctl ; \
+ apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
+ rm /usr/bin/systemctl ; \
+ apt-get clean ; \
+ rm -rf /var/lib/apt/lists/* ;
diff --git a/ci/openjdk8-mongodb-5.0/Dockerfile b/ci/openjdk8-mongodb-5.0/Dockerfile
new file mode 100644
index 0000000000..127d2693bc
--- /dev/null
+++ b/ci/openjdk8-mongodb-5.0/Dockerfile
@@ -0,0 +1,23 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+ sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
+ sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
+ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
+ # MongoDB 5.0 release signing key
+ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
+ # Needed when MongoDB creates a 5.0 folder.
+ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
+ echo ${TZ} > /etc/timezone;
+
+RUN apt-get update; \
+ apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
+ apt-get clean; \
+ rm -rf /var/lib/apt/lists/*;
diff --git a/ci/pipeline.properties b/ci/pipeline.properties
new file mode 100644
index 0000000000..f3c84e0527
--- /dev/null
+++ b/ci/pipeline.properties
@@ -0,0 +1,29 @@
+# Java versions
+java.main.tag=8u322-b06-jdk
+java.next.tag=11.0.14.1_1-jdk
+java.lts.tag=17.0.2_8-jdk
+
+# Docker container images - standard
+docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
+docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
+docker.java.lts.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.lts.tag}
+
+# Supported versions of MongoDB
+docker.mongodb.4.0.version=4.0.28
+docker.mongodb.4.4.version=4.4.12
+docker.mongodb.5.0.version=5.0.6
+
+# Supported versions of Redis
+docker.redis.6.version=6.2.6
+
+# Supported versions of Cassandra
+docker.cassandra.3.version=3.11.12
+
+# Docker environment settings
+docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
+docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home
+
+# Credentials
+docker.registry=
+docker.credentials=hub.docker.com-springbuildmaster
+artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c
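The `docker.java.inside.*` entries are raw `docker run` option strings that the Jenkinsfile hands to `image.inside(...)`; `docker.java.inside.basic` simply mounts the Jenkins home into the container so the Maven cache kept under `/tmp/jenkins-home` survives across builds. As a rough stand-alone illustration (image coordinates taken from this file and the Jenkinsfile; Jenkins adds its own workspace mounts on top of this), the same option can be used directly with `docker run`:

[source,bash]
----
$ docker run --rm -v $HOME:/tmp/jenkins-home \
    harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:8u322-b06-jdk \
    java -version
----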
diff --git a/pom.xml b/pom.xml
index ce0fa5e15d..4232689e67 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,17 +5,17 @@
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.0.0.RELEASE</version>
+	<version>3.3.5</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>
	<description>MongoDB support for Spring Data</description>
-	<url>https://projects.spring.io/spring-data-mongodb</url>
+	<url>https://spring.io/projects/spring-data-mongodb</url>

	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
-		<version>2.3.0.RELEASE</version>
+		<version>2.6.5</version>
	</parent>
@@ -26,8 +26,8 @@
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.3.0.RELEASE</springdata.commons>
-		<mongo>4.0.3</mongo>
+		<springdata.commons>2.6.5</springdata.commons>
+		<mongo>4.4.2</mongo>
		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
		<jmh.version>1.19</jmh.version>
@@ -112,6 +112,17 @@
+	<scm>
+		<connection>scm:git:https://github.com/spring-projects/spring-data-mongodb.git</connection>
+		<developerConnection>scm:git:git@github.com:spring-projects/spring-data-mongodb.git</developerConnection>
+		<url>https://github.com/spring-projects/spring-data-mongodb</url>
+	</scm>
+
+	<issueManagement>
+		<system>GitHub</system>
+		<url>https://github.com/spring-projects/spring-data-mongodb/issues</url>
+	</issueManagement>
+
	<profiles>
		<profile>
			<id>benchmarks</id>
@@ -141,11 +152,11 @@
			<id>sonatype-libs-snapshot</id>
			<url>https://oss.sonatype.org/content/repositories/snapshots</url>
			<releases>
-				<enabled>false</enabled>
-			</releases>
+				<enabled>false</enabled>
+			</releases>
			<snapshots>
-				<enabled>true</enabled>
-			</snapshots>
+				<enabled>true</enabled>
+			</snapshots>
@@ -158,7 +169,6 @@
			<id>spring-libs-milestone</id>
			<url>https://repo.spring.io/libs-milestone</url>
-
diff --git a/settings.xml b/settings.xml
new file mode 100644
index 0000000000..b3227cc110
--- /dev/null
+++ b/settings.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 https://maven.apache.org/xsd/settings-1.0.0.xsd">
+
+	<servers>
+		<server>
+			<id>spring-plugins-release</id>
+			<username>${env.ARTIFACTORY_USR}</username>
+			<password>${env.ARTIFACTORY_PSW}</password>
+		</server>
+		<server>
+			<id>spring-libs-snapshot</id>
+			<username>${env.ARTIFACTORY_USR}</username>
+			<password>${env.ARTIFACTORY_PSW}</password>
+		</server>
+		<server>
+			<id>spring-libs-milestone</id>
+			<username>${env.ARTIFACTORY_USR}</username>
+			<password>${env.ARTIFACTORY_PSW}</password>
+		</server>
+		<server>
+			<id>spring-libs-release</id>
+			<username>${env.ARTIFACTORY_USR}</username>
+			<password>${env.ARTIFACTORY_PSW}</password>
+		</server>
+	</servers>
+
+</settings>
\ No newline at end of file
diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml
index fa6beb5778..8d083e47c0 100644
--- a/spring-data-mongodb-benchmarks/pom.xml
+++ b/spring-data-mongodb-benchmarks/pom.xml
@@ -7,7 +7,7 @@
org.springframework.dataspring-data-mongodb-parent
- 3.0.0.RELEASE
+ 3.3.5../pom.xml
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java
index c7df157f63..ac7932b6e8 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java
index 5694416f3d..a6c3105fe4 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java
index a29fb51e3e..196cdcbdc4 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java
index 4c09f1a166..ea940b3c8e 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java
index e6d728dbdc..ba34f38b1c 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java
index 19e7987ee2..ee12058f5f 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java
index bc43cb5ded..30cbaf6c08 100644
--- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java
+++ b/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml
index f0cb34d5f3..9784bffcc5 100644
--- a/spring-data-mongodb-distribution/pom.xml
+++ b/spring-data-mongodb-distribution/pom.xml
@@ -14,7 +14,7 @@
org.springframework.dataspring-data-mongodb-parent
- 3.0.0.RELEASE
+ 3.3.5../pom.xml
diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml
index 6ede5b361b..eae8fe722a 100644
--- a/spring-data-mongodb/pom.xml
+++ b/spring-data-mongodb/pom.xml
@@ -11,7 +11,7 @@
org.springframework.dataspring-data-mongodb-parent
- 3.0.0.RELEASE
+ 3.3.5../pom.xml
@@ -87,6 +87,13 @@
true
+
+ com.google.code.findbugs
+ jsr305
+ 3.0.2
+ true
+
+
@@ -136,6 +143,13 @@
true
+
+ io.reactivex.rxjava3
+ rxjava
+ ${rxjava3}
+ true
+
+
@@ -192,7 +206,14 @@
org.hibernatehibernate-validator
- 5.2.4.Final
+ 5.4.3.Final
+ test
+
+
+
+ org.glassfish
+ javax.el
+ 3.0.1-b11test
@@ -296,6 +317,15 @@
test
+
+
+
+ org.jmolecules
+ jmolecules-ddd
+ ${jmolecules}
+ test
+
+
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java
new file mode 100644
index 0000000000..304ff63d25
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2021-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb;
+
+import java.util.Arrays;
+
+import org.bson.Document;
+import org.bson.codecs.DocumentCodec;
+import org.bson.codecs.configuration.CodecRegistry;
+import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
+import org.springframework.data.util.Lazy;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;
+
+/**
+ * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json})
+ * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter
+ * binding of placeholders like {@code ?0} is delayed until the first call on the target {@link Document} via
+ * {@link #toDocument()}.
+ *
+ *
+ *
+ *
+ * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry}
+ * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
+ *
+ * @author Christoph Strobl
+ * @since 3.2
+ */
+public class BindableMongoExpression implements MongoExpression {
+
+ private final String expressionString;
+
+ private final @Nullable CodecRegistryProvider codecRegistryProvider;
+
+ private final @Nullable Object[] args;
+
+ private final Lazy<Document> target;
+
+ /**
+ * Create a new instance of {@link BindableMongoExpression}.
+ *
+ * @param expression must not be {@literal null}.
+ * @param args can be {@literal null}.
+ */
+ public BindableMongoExpression(String expression, @Nullable Object[] args) {
+ this(expression, null, args);
+ }
+
+ /**
+ * Create a new instance of {@link BindableMongoExpression}.
+ *
+ * @param expression must not be {@literal null}.
+ * @param codecRegistryProvider can be {@literal null}.
+ * @param args can be {@literal null}.
+ */
+ public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
+ @Nullable Object[] args) {
+
+ this.expressionString = expression;
+ this.codecRegistryProvider = codecRegistryProvider;
+ this.args = args;
+ this.target = Lazy.of(this::parse);
+ }
+
+ /**
+ * Provide the {@link CodecRegistry} used to convert expressions.
+ *
+ * @param codecRegistry must not be {@literal null}.
+ * @return new instance of {@link BindableMongoExpression}.
+ */
+ public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) {
+ return new BindableMongoExpression(expressionString, () -> codecRegistry, args);
+ }
+
+ /**
+ * Provide the arguments to bind to the placeholders via their index.
+ *
+ * @param args must not be {@literal null}.
+ * @return new instance of {@link BindableMongoExpression}.
+ */
+ public BindableMongoExpression bind(Object... args) {
+ return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.MongoExpression#toDocument()
+ */
+ @Override
+ public Document toDocument() {
+ return target.get();
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
+ + Arrays.toString(args) + '}';
+ }
+
+ private Document parse() {
+
+ String expression = wrapJsonIfNecessary(expressionString);
+
+ if (ObjectUtils.isEmpty(args)) {
+
+ if (codecRegistryProvider == null) {
+ return Document.parse(expression);
+ }
+
+ return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class)
+ .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
+ }
+
+ ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec()
+ : new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry());
+ return codec.decode(expression, args);
+ }
+
+ private static String wrapJsonIfNecessary(String json) {
+
+ if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
+ return json;
+ }
+
+ return "{" + json + "}";
+ }
+}
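For orientation, a minimal usage sketch of the BindableMongoExpression added above; the sample expression and the bound "$name" argument are illustrative only:

    import org.bson.Document;
    import org.springframework.data.mongodb.BindableMongoExpression;

    import com.mongodb.MongoClientSettings;

    class BindableMongoExpressionSample {

        Document resolve() {

            // parsing and placeholder binding happen lazily, on the first toDocument() call
            BindableMongoExpression expression = new BindableMongoExpression("'$toUpper' : ?0", new Object[] { "$name" })
                    .withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry());

            return expression.toDocument(); // { "$toUpper" : "$name" }
        }
    }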
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
index eee31a1e67..3ed4675017 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2015-2020 the original author or authors.
+ * Copyright 2015-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
index 46c1c43546..76e73e2394 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
index 81d0955f0a..0bed30ccea 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
index 4d5831dd4d..f380871d25 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2020 the original author or authors.
+ * Copyright 2010-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
index 0ec364f6c8..158b7e12cf 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2013-2020 the original author or authors.
+ * Copyright 2013-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
index 4167250634..903bb71101 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2011-2020 the original author or authors.
+ * Copyright 2011-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,8 +20,8 @@
/**
* Helper class featuring helper methods for working with MongoDb collections.
- *
- *
+ *
+ *
* Mainly intended for internal use within the framework.
*
* @author Thomas Risberg
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java
index 96620d8f8a..8272e126b0 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2011-2019 the original author or authors.
+ * Copyright 2011-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java
index 2c7d3903cd..f80a278057 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -30,7 +30,7 @@
* Helper class for managing {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
- *
+ *
* Note: Intended for internal usage only.
*
* @author Christoph Strobl
@@ -43,7 +43,7 @@ public class MongoDatabaseUtils {
/**
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -56,7 +56,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
/**
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -71,7 +71,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSyn
/**
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory} using
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -79,13 +79,13 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSyn
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
- public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory) {
+ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory) {
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
}
/**
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -94,7 +94,7 @@ public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory fact
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
- public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory,
+ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
SessionSynchronization sessionSynchronization) {
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
}
@@ -104,7 +104,8 @@ private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDa
Assert.notNull(factory, "Factory must not be null!");
- if (!TransactionSynchronizationManager.isSynchronizationActive()) {
+ if (sessionSynchronization == SessionSynchronization.NEVER
+ || !TransactionSynchronizationManager.isSynchronizationActive()) {
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
}
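With the new SessionSynchronization.NEVER guard above, callers can bypass transaction synchronization entirely. A hedged sketch; the connection string and database name are placeholders:

    import com.mongodb.client.MongoDatabase;

    import org.springframework.data.mongodb.MongoDatabaseFactory;
    import org.springframework.data.mongodb.MongoDatabaseUtils;
    import org.springframework.data.mongodb.SessionSynchronization;
    import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

    class SessionSynchronizationSample {

        MongoDatabase plainDatabase() {

            MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory("mongodb://localhost/test");

            // SessionSynchronization.NEVER short-circuits the lookup: the factory is asked for the
            // database directly, even when a transaction synchronization happens to be active.
            return MongoDatabaseUtils.getDatabase("test", factory, SessionSynchronization.NEVER);
        }
    }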
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java
index aa46fbc882..1190f9470e 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2011-2020 the original author or authors.
+ * Copyright 2011-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java
new file mode 100644
index 0000000000..498f835ac1
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb;
+
+/**
+ * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
+ * passed on to the driver.
+ *
+ * A set of predefined {@link MongoExpression expressions}, including a
+ * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for
+ * method-like expressions (eg. {@code toUpper(name)}) is available via the
+ * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}.
+ *
+ * @author Christoph Strobl
+ * @since 3.2
+ * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators
+ * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators
+ * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators
+ * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators
+ * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators
+ * @see org.springframework.data.mongodb.core.aggregation.DateOperators
+ * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators
+ * @see org.springframework.data.mongodb.core.aggregation.SetOperators
+ * @see org.springframework.data.mongodb.core.aggregation.StringOperators
+ */
+@FunctionalInterface
+public interface MongoExpression {
+
+ /**
+ * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}).
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document}
+ * if necessary.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link MongoExpression}.
+ */
+ static MongoExpression create(String expression) {
+ return new BindableMongoExpression(expression, null);
+ }
+
+ /**
+ * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0})
+ * that will be resolved on first call of {@link #toDocument()}.
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document}
+ * if necessary.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link MongoExpression}.
+ */
+ static MongoExpression create(String expression, Object... args) {
+ return new BindableMongoExpression(expression, args);
+ }
+
+ /**
+ * Obtain the native {@link org.bson.Document} representation.
+ *
+ * @return never {@literal null}.
+ */
+ org.bson.Document toDocument();
+}
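A short sketch of the two factory methods declared above; the expressions themselves are illustrative:

    import org.bson.Document;
    import org.springframework.data.mongodb.MongoExpression;

    class MongoExpressionSample {

        Document fixed() {
            // the expression is wrapped with { ... } automatically
            return MongoExpression.create("'$toUpper' : '$name'").toDocument();
        }

        Document bound() {
            // placeholder variant; ?0 is resolved against the given argument on toDocument()
            return MongoExpression.create("'$toUpper' : ?0", "$name").toDocument();
        }
    }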
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java
index c1b80f814f..5528dce38c 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
/**
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
* {@link MongoTransactionManager} binds instances of this class to the thread.
- *
+ *
* Note: Intended for internal usage only.
*
* @author Christoph Strobl
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java
index 47cd65a1c8..202089dab3 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java
index 746a769e22..62a3664dc4 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java
index b0c8d640f7..7aa54472c8 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -37,18 +37,18 @@
/**
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
- *
+ *
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
- *
+ *
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
- *
+ *
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
- *
+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
* {@link #doCommit(MongoTransactionObject)} to implement the
* Retry Commit Operation
@@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
/**
* Create a new {@link MongoTransactionManager} for bean-style usage.
- *
+ *
* Note:The {@link MongoDatabaseFactory db factory} has to be
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
- *
+ *
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
@@ -212,8 +212,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio
* By default those labels are ignored, nevertheless one might check for
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the
* commit.
+ *
*
- *
* int retries = 3;
* do {
* try {
@@ -226,8 +226,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio
* }
* Thread.sleep(500);
* } while (--retries > 0);
- *
*
+ *
*
* @param transactionObject never {@literal null}.
* @throws Exception in case of transaction errors.
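The javadoc above outlines retrying transient commit errors by overriding doCommit. Spelled out as a subclass, this is a non-authoritative sketch following that outline:

    import org.springframework.data.mongodb.MongoDatabaseFactory;
    import org.springframework.data.mongodb.MongoTransactionManager;

    import com.mongodb.MongoException;

    class RetryingMongoTransactionManager extends MongoTransactionManager {

        RetryingMongoTransactionManager(MongoDatabaseFactory databaseFactory) {
            super(databaseFactory);
        }

        @Override
        protected void doCommit(MongoTransactionObject transactionObject) throws Exception {

            int retries = 3;
            do {
                try {
                    super.doCommit(transactionObject);
                    break;
                } catch (MongoException ex) {
                    // only retry commits that the server flagged as possibly retryable
                    if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
                        throw ex;
                    }
                }
                Thread.sleep(500);
            } while (--retries > 0);
        }
    }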
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java
index 6a54e94e65..77674a41e3 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2020 the original author or authors.
+ * Copyright 2016-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java
index 6138d71a57..98b7c197b0 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019-2020 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
* suitable for transactional usage.
- *
+ *
* Note: Intended for internal usage only.
*
* @author Mark Paluch
@@ -75,7 +75,7 @@ public static Mono isTransactionActive(ReactiveMongoDatabaseFactory dat
/**
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory} using
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -88,7 +88,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto
/**
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -104,7 +104,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto
/**
* Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -119,7 +119,7 @@ public static Mono getDatabase(String dbName, ReactiveMongoDataba
/**
* Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
* factory}.
- *
+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
@@ -138,6 +138,10 @@ private static Mono doGetMongoDatabase(@Nullable String dbName, R
Assert.notNull(factory, "DatabaseFactory must not be null!");
+ if (sessionSynchronization == SessionSynchronization.NEVER) {
+ return getMongoDatabaseOrDefault(dbName, factory);
+ }
+
return TransactionSynchronizationManager.forCurrentTransaction()
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
.flatMap(synchronizationManager -> {
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java
index 34301bdc6f..fdcf4d520c 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019-2020 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
/**
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
* instances of this class to the subscriber context.
- *
+ *
* Note: Intended for internal usage only.
*
* @author Mark Paluch
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java
index 8792e169cc..d0d9d59c6f 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019-2020 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -38,21 +38,21 @@
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
- *
+ *
* Binds a {@link ClientSession} from the specified
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
* {@link reactor.util.context.Context}.
- *
+ *
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
* {@link ClientSession#abortTransaction() abort} a transaction.
- *
+ *
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
- *
+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
* Retry Commit Operation
@@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
/**
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
- *
+ *
* Note:The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
- *
+ *
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
@@ -110,7 +110,7 @@ public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFact
this.options = options;
}
- /*
+ /*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
*/
@@ -123,7 +123,7 @@ protected Object doGetTransaction(TransactionSynchronizationManager synchronizat
return new ReactiveMongoTransactionObject(resourceHolder);
}
- /*
+ /*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
*/
@@ -132,7 +132,7 @@ protected boolean isExistingTransaction(Object transaction) throws TransactionEx
return extractMongoTransaction(transaction).hasResourceHolder();
}
- /*
+ /*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
*/
@@ -175,7 +175,7 @@ protected Mono doBegin(TransactionSynchronizationManager synchronizationMa
});
}
- /*
+ /*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
*/
@@ -192,7 +192,7 @@ protected Mono
+
+ * {@link Encrypted} properties will contain {@literal encrypt} information.
*
* @author Christoph Strobl
* @since 2.2
@@ -60,6 +78,88 @@ public interface MongoJsonSchemaCreator {
*/
MongoJsonSchema createSchemaFor(Class<?> type);
+ /**
+ * Filter matching {@link JsonSchemaProperty properties}.
+ *
+ * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}.
+ * @return new instance of {@link MongoJsonSchemaCreator}.
+ * @since 3.3
+ */
+ MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter);
+
+ /**
+ * The context in which a specific {@link #getProperty()} is encountered during schema creation.
+ *
+ * @since 3.3
+ */
+ interface JsonSchemaPropertyContext {
+
+ /**
+ * The path to a given field/property in dot notation.
+ *
+ * @return never {@literal null}.
+ */
+ String getPath();
+
+ /**
+ * The current property.
+ *
+ * @return never {@literal null}.
+ */
+ MongoPersistentProperty getProperty();
+
+ /**
+ * Obtain the {@link MongoPersistentEntity} for a given property.
+ *
+ * @param property must not be {@literal null}.
+ * @param <T>
+ * @return {@literal null} if the property is not an entity. It is nevertheless recommended to check
+ * {@link PersistentProperty#isEntity()} first.
+ */
+ @Nullable
+ <T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property);
+
+ }
+
+ /**
+ * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones.
+ *
+ * @return new instance of {@link Predicate}.
+ * @since 3.3
+ */
+ static Predicate<JsonSchemaPropertyContext> encryptedOnly() {
+
+ return new Predicate<JsonSchemaPropertyContext>() {
+
+ // cycle guard
+ private final Set<MongoPersistentProperty> seen = new HashSet<>();
+
+ @Override
+ public boolean test(JsonSchemaPropertyContext context) {
+ return extracted(context.getProperty(), context);
+ }
+
+ private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) {
+ if (property.isAnnotationPresent(Encrypted.class)) {
+ return true;
+ }
+
+ if (!property.isEntity() || seen.contains(property)) {
+ return false;
+ }
+
+ seen.add(property);
+
+ for (MongoPersistentProperty nested : context.resolveEntity(property)) {
+ if (extracted(nested, context)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ };
+ }
+
/**
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
* {@link MongoConverter}.
@@ -72,4 +172,41 @@ static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
return new MappingMongoJsonSchemaCreator(mongoConverter);
}
+
+ /**
+ * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential
+ * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}.
+ *
+ * @param mappingContext must not be {@literal null}.
+ * @return new instance of {@link MongoJsonSchemaCreator}.
+ * @since 3.3
+ */
+ static MongoJsonSchemaCreator create(MappingContext mappingContext) {
+
+ MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
+ converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
+ converter.afterPropertiesSet();
+
+ return create(converter);
+ }
+
+ /**
+ * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We
+ * recommend using {@link #create(MappingContext)}.
+ *
+ * @return new instance of {@link MongoJsonSchemaCreator}.
+ * @since 3.3
+ */
+ static MongoJsonSchemaCreator create() {
+
+ MongoMappingContext mappingContext = new MongoMappingContext();
+ mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
+ mappingContext.afterPropertiesSet();
+
+ MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
+ converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
+ converter.afterPropertiesSet();
+
+ return create(converter);
+ }
}
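Putting the new filter(...) and encryptedOnly() pieces together, a hedged usage sketch; the Patient type and its fields are made up for illustration:

    import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
    import org.springframework.data.mongodb.core.mapping.Encrypted;
    import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

    class EncryptedSchemaSample {

        // hypothetical domain type; only the @Encrypted property survives the filter
        static class Patient {
            String name;
            @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String ssn;
        }

        MongoJsonSchema schemaForEncryptedFields() {

            return MongoJsonSchemaCreator.create() // extension-free variant, suitable for tests
                    .filter(MongoJsonSchemaCreator.encryptedOnly())
                    .createSchemaFor(Patient.class);
        }
    }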
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java
index bb6c402fb7..5aa554771d 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2011-2020 the original author or authors.
+ * Copyright 2011-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -58,7 +58,7 @@
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
* proxy).
- *
+ *
* NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
* specific documentation to learn more about Multi
* Document Transactions.
@@ -125,7 +125,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Executes a {@link DbCallback} translating any exceptions as necessary.
- *
+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
@@ -138,7 +138,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Executes the given {@link CollectionCallback} on the entity collection of the specified class.
- *
+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
@@ -151,7 +151,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Executes the given {@link CollectionCallback} on the collection of the given name.
- *
+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
@@ -176,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
- *
+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
*
@@ -212,7 +212,7 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
- *
+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle.
*
* @param session must not be {@literal null}.
@@ -300,7 +300,7 @@ public T execute(SessionCallback action, Consumer onComple
* is created on first interaction with the server. Collections can be explicitly created via
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
* exists} first.
- *
+ *
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection. Must not be {@literal null}.
@@ -310,7 +310,7 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Check to see if a collection with a name indicated by the entity class exists.
- *
+ *
* Translate any exceptions as necessary.
*
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
@@ -320,7 +320,7 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Check to see if a collection with a given name exists.
- *
+ *
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection. Must not be {@literal null}.
@@ -330,7 +330,7 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Drop the collection with the name indicated by the entity class.
- *
+ *
* Translate any exceptions as necessary.
*
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
@@ -339,7 +339,7 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Drop the collection with the given name.
- *
+ *
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection to drop/delete.
@@ -403,10 +403,10 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Query for a list of objects of type T from the collection used by the entity class.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
*
@@ -417,10 +417,10 @@ public T execute(SessionCallback action, Consumer onComple
/**
* Query for a list of objects of type T from the specified collection.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
*
@@ -539,11 +539,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN
/**
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
- *
+ *
* Returns a {@link CloseableIterator} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
* of the inputCollection is derived from the inputType of the aggregation.
- *
+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
* explanation mode will throw an {@link IllegalArgumentException}.
*
@@ -557,10 +557,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN
/**
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
- *
+ *
* Returns a {@link CloseableIterator} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class.
- *
+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
* explanation mode will throw an {@link IllegalArgumentException}.
*
@@ -576,10 +576,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN
/**
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
- *
+ *
* Returns a {@link CloseableIterator} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class.
- *
+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
* explanation mode will throw an {@link IllegalArgumentException}.
*
@@ -702,10 +702,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin
/**
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
* specified type.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -720,10 +720,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin
/**
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
* type.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -768,10 +768,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin
/**
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -784,10 +784,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin
/**
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -881,7 +881,7 @@ default List findDistinct(Query query, String field, String collection, C
}
/**
- * Triggers findAndModify
+ * Triggers findAndModify
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@@ -897,7 +897,7 @@ default List findDistinct(Query query, String field, String collection, C
T findAndModify(Query query, UpdateDefinition update, Class entityClass);
/**
- * Triggers findAndModify
+ * Triggers findAndModify
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@@ -914,7 +914,7 @@ default List findDistinct(Query query, String field, String collection, C
T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName);
/**
- * Triggers findAndModify
+ * Triggers findAndModify
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
@@ -934,7 +934,7 @@ default List findDistinct(Query query, String field, String collection, C
T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass);
/**
- * Triggers findAndModify
+ * Triggers findAndModify
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
@@ -957,7 +957,7 @@ T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions o
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
* document.
* The collection name is derived from the {@literal replacement} type.
@@ -977,7 +977,7 @@ default T findAndReplace(Query query, T replacement) {
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
* document.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
@@ -997,7 +997,7 @@ default T findAndReplace(Query query, T replacement, String collectionName)
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}.
@@ -1018,7 +1018,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}.
@@ -1041,7 +1041,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}.
@@ -1066,7 +1066,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}.
@@ -1094,7 +1094,7 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption
/**
* Triggers
- * findOneAndReplace
+ * findOneAndReplace
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}.
@@ -1120,9 +1120,9 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
* specified type. The first document that matches the query is returned and also removed from the collection in the
* database.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -1137,10 +1137,10 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
/**
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
* type. The first document that matches the query is returned and also removed from the collection in the database.
- *
+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -1160,6 +1160,12 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
+ *
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries}, which may have an impact on performance but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
+ * {@link #estimatedCount(Class)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
@@ -1176,6 +1182,12 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
+ *
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries}, which may have an impact on performance but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
+ * {@link #estimatedCount(String)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents.
* @param collectionName must not be {@literal null} or empty.
@@ -1184,6 +1196,35 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
*/
long count(Query query, String collectionName);
+ /**
+ * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
+ * based on collection statistics.
+ *
+ * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside
+ * transactions.
+ *
+ * @param entityClass must not be {@literal null}.
+ * @return the estimated number of documents.
+ * @since 3.1
+ */
+ default long estimatedCount(Class<?> entityClass) {
+
+ Assert.notNull(entityClass, "Entity class must not be null!");
+ return estimatedCount(getCollectionName(entityClass));
+ }
+
+ /**
+ * Estimate the number of documents in the given collection based on collection statistics.
+ *
+ * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside
+ * transactions.
+ *
+ * @param collectionName must not be {@literal null}.
+ * @return the estimated number of documents.
+ * @since 3.1
+ */
+ long estimatedCount(String collectionName);
+
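A minimal usage sketch (illustrative, not part of this patch) contrasting the exact and the estimated count introduced above; the Person document class and its properties are assumptions:

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class CountExample {

    void counts(MongoOperations operations) {

        // exact count via countDocuments: shard-, session- and transaction-safe, but inspects matching documents
        long adults = operations.count(new Query(Criteria.where("age").gte(18)), Person.class);

        // estimated count from collection statistics: cheap, only meaningful without query criteria
        long roughTotal = operations.estimatedCount(Person.class);
    }

    static class Person {
        String name;
        int age;
    }
}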
/**
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
* class to map the given {@link Query}.
@@ -1191,6 +1232,12 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
+ *
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries}, which may have an impact on performance but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
+ * {@link #estimatedCount(String)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
@@ -1202,34 +1249,39 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
/**
* Insert the object into the collection for the entity type of the object to save.
- *
+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
- *
+ *
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* Spring's
* Type Conversion for more details.
- *
- *
+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method.
+ *
+ * The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the inserted object.
+ * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
T insert(T objectToSave);
/**
* Insert the object into the specified collection.
- *
+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method.
+ *
+ * The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
* @return the inserted object.
+ * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
T insert(T objectToSave, String collectionName);
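A short sketch (illustrative, not part of this patch) of the collection-like restriction called out above; Person is a placeholder entity, and the commented line shows the rejected call:

import java.util.Arrays;

import org.springframework.data.mongodb.core.MongoOperations;

class InsertExample {

    void insert(MongoOperations operations, Person p1, Person p2) {

        operations.insert(p1);                                        // single object: allowed
        // operations.insert(Arrays.asList(p1, p2));                  // collection-like: IllegalArgumentException
        operations.insert(Arrays.asList(p1, p2), Person.class);       // use the batch overload for collections
    }

    static class Person {}
}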
@@ -1263,37 +1315,42 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option
/**
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
* object is not already present, that is an 'upsert'.
- *
+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* Spring's
* Type Conversion for more details.
+ *
+ * The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
+ * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
T save(T objectToSave);
/**
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
* is an 'upsert'.
- *
+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
- *
+ *
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
- * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type
- * Conversion" for more details.
+ * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
+ * See Spring's Type Conversion for more details.
+ *
+ * The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
* @return the saved object.
+ * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
T save(T objectToSave, String collectionName);
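A sketch (illustrative, not part of this patch) of the 'upsert' semantics described above; the Person type and its fields are assumptions:

import org.springframework.data.mongodb.core.MongoOperations;

class SaveExample {

    void save(MongoOperations operations) {

        Person person = new Person();
        person.name = "Ada";

        operations.save(person);          // no id yet: inserted, id populated from MongoDB
        person.name = "Ada Lovelace";
        operations.save(person);          // same id: the existing document is updated
    }

    static class Person {
        String id;
        String name;
    }
}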
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java
new file mode 100644
index 0000000000..ce7ad5711d
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2021-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import org.springframework.beans.factory.FactoryBean;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ObjectUtils;
+
+import com.mongodb.ServerApi;
+import com.mongodb.ServerApi.Builder;
+import com.mongodb.ServerApiVersion;
+
+/**
+ * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}.
+ *
+ * @author Christoph Strobl
+ * @since 3.3
+ */
+public class MongoServerApiFactoryBean implements FactoryBean<ServerApi> {
+
+ private String version;
+ private @Nullable Boolean deprecationErrors;
+ private @Nullable Boolean strict;
+
+ /**
+ * @param version the version string either as the enum name or the server version value.
+ * @see ServerApiVersion
+ */
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ /**
+ * @param deprecationErrors
+ * @see ServerApi.Builder#deprecationErrors(boolean)
+ */
+ public void setDeprecationErrors(@Nullable Boolean deprecationErrors) {
+ this.deprecationErrors = deprecationErrors;
+ }
+
+ /**
+ * @param strict
+ * @see ServerApi.Builder#strict(boolean)
+ */
+ public void setStrict(@Nullable Boolean strict) {
+ this.strict = strict;
+ }
+
+ @Nullable
+ @Override
+ public ServerApi getObject() throws Exception {
+
+ Builder builder = ServerApi.builder().version(version());
+
+ if (deprecationErrors != null) {
+ builder = builder.deprecationErrors(deprecationErrors);
+ }
+ if (strict != null) {
+ builder = builder.strict(strict);
+ }
+ return builder.build();
+ }
+
+ @Nullable
+ @Override
+ public Class<?> getObjectType() {
+ return ServerApi.class;
+ }
+
+ private ServerApiVersion version() {
+ try {
+ // lookup by name, e.g. 'V1'
+ return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version);
+ } catch (IllegalArgumentException e) {
+ // or just the version number, e.g. '1'
+ return ServerApiVersion.findByValue(version);
+ }
+ }
+}
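A minimal sketch (illustrative, not part of this patch) of using the new factory bean programmatically and handing the result to the driver; the surrounding configuration class is an assumption:

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerApi;

import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;

class ServerApiConfig {

    MongoClientSettings settingsWithStableApi() throws Exception {

        MongoServerApiFactoryBean factoryBean = new MongoServerApiFactoryBean();
        factoryBean.setVersion("V1");          // enum name; a plain "1" resolves via ServerApiVersion.findByValue
        factoryBean.setStrict(true);
        factoryBean.setDeprecationErrors(false);

        ServerApi serverApi = factoryBean.getObject();
        return MongoClientSettings.builder().serverApi(serverApi).build();
    }
}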
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java
index b70ddcbb60..30b4bbcd75 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2020 the original author or authors.
+ * Copyright 2010-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,11 +17,6 @@
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
-import lombok.AccessLevel;
-import lombok.AllArgsConstructor;
-import lombok.NonNull;
-import lombok.RequiredArgsConstructor;
-
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
@@ -52,6 +47,7 @@
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Metric;
+import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.MongoDatabaseFactory;
@@ -60,6 +56,7 @@
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
+import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
@@ -102,12 +99,12 @@
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
+import org.springframework.data.mongodb.core.timeseries.Granularity;
import org.springframework.data.mongodb.core.validation.Validator;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.Optionals;
-import org.springframework.jca.cci.core.ConnectionCallback;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
@@ -160,22 +157,14 @@
* @author Cimon Lucas
* @author Michael J. Simons
* @author Roman Puchkovskiy
+ * @author Yadhukrishna S Pai
+ * @author Anton Barkan
+ * @author Bartłomiej Mazur
*/
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
- private static final Collection ITERABLE_CLASSES;
-
- static {
-
- Set iterableClasses = new HashSet<>();
- iterableClasses.add(List.class.getName());
- iterableClasses.add(Collection.class.getName());
- iterableClasses.add(Iterator.class.getName());
-
- ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses);
- }
private final MongoConverter mongoConverter;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
@@ -326,6 +315,7 @@ public void setReadPreference(@Nullable ReadPreference readPreference) {
* (non-Javadoc)
* @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
*/
+ @Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
prepareIndexCreator(applicationContext);
@@ -349,7 +339,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws
/**
* Set the {@link EntityCallbacks} instance to use when invoking
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}.
- *
+ *
* Overrides potentially existing {@link EntityCallbacks}.
*
* @param entityCallbacks must not be {@literal null}.
@@ -391,6 +381,7 @@ private void prepareIndexCreator(ApplicationContext context) {
*
* @return
*/
+ @Override
public MongoConverter getConverter() {
return this.mongoConverter;
}
@@ -530,6 +521,7 @@ protected void executeQuery(Query query, String collectionName, DocumentCallback
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#execute(org.springframework.data.mongodb.core.DbCallback)
*/
+ @Override
public T execute(DbCallback action) {
Assert.notNull(action, "DbCallback must not be null!");
@@ -546,6 +538,7 @@ public T execute(DbCallback action) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.DbCallback)
*/
+ @Override
public T execute(Class> entityClass, CollectionCallback callback) {
Assert.notNull(entityClass, "EntityClass must not be null!");
@@ -556,6 +549,7 @@ public T execute(Class> entityClass, CollectionCallback callback) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.DbCallback)
*/
+ @Override
public T execute(String collectionName, CollectionCallback callback) {
Assert.notNull(collectionName, "CollectionName must not be null!");
@@ -608,14 +602,16 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class)
*/
+ @Override
public MongoCollection createCollection(Class entityClass) {
- return createCollection(entityClass, CollectionOptions.empty());
+ return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions)
*/
+ @Override
public MongoCollection createCollection(Class entityClass,
@Nullable CollectionOptions collectionOptions) {
@@ -634,6 +630,7 @@ public MongoCollection createCollection(Class entityClass,
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String)
*/
+ @Override
public MongoCollection createCollection(String collectionName) {
Assert.notNull(collectionName, "CollectionName must not be null!");
@@ -645,6 +642,7 @@ public MongoCollection createCollection(String collectionName) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions)
*/
+ @Override
public MongoCollection createCollection(String collectionName,
@Nullable CollectionOptions collectionOptions) {
@@ -656,6 +654,7 @@ public MongoCollection createCollection(String collectionName,
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#getCollection(java.lang.String)
*/
+ @Override
@SuppressWarnings("ConstantConditions")
public MongoCollection getCollection(String collectionName) {
@@ -668,6 +667,7 @@ public MongoCollection getCollection(String collectionName) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.Class)
*/
+ @Override
public boolean collectionExists(Class entityClass) {
return collectionExists(getCollectionName(entityClass));
}
@@ -676,6 +676,7 @@ public boolean collectionExists(Class entityClass) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.String)
*/
+ @Override
@SuppressWarnings("ConstantConditions")
public boolean collectionExists(String collectionName) {
@@ -696,6 +697,7 @@ public boolean collectionExists(String collectionName) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.Class)
*/
+ @Override
public void dropCollection(Class entityClass) {
dropCollection(getCollectionName(entityClass));
}
@@ -704,6 +706,7 @@ public void dropCollection(Class entityClass) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.String)
*/
+ @Override
public void dropCollection(String collectionName) {
Assert.notNull(collectionName, "CollectionName must not be null!");
@@ -718,26 +721,34 @@ public void dropCollection(String collectionName) {
});
}
+ @Override
+ public IndexOperations indexOps(String collectionName) {
+ return indexOps(collectionName, null);
+ }
+
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String)
*/
- public IndexOperations indexOps(String collectionName) {
- return new DefaultIndexOperations(this, collectionName, null);
+ @Override
+ public IndexOperations indexOps(String collectionName, @Nullable Class<?> type) {
+ return new DefaultIndexOperations(this, collectionName, type);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class)
*/
+ @Override
public IndexOperations indexOps(Class> entityClass) {
- return new DefaultIndexOperations(this, getCollectionName(entityClass), entityClass);
+ return indexOps(getCollectionName(entityClass), entityClass);
}
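A small sketch (illustrative, not part of this patch) of the refined indexOps overloads above; collection and property names are assumptions:

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;

class IndexOpsExample {

    void ensureAgeIndex(MongoTemplate template) {
        // the type argument lets index creation use Person's mapping metadata for field name translation
        template.indexOps("people", Person.class).ensureIndex(new Index("age", Direction.ASC));
    }

    static class Person {
        int age;
    }
}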
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.String)
*/
+ @Override
public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
return bulkOps(bulkMode, null, collectionName);
}
@@ -746,6 +757,7 @@ public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class)
*/
+ @Override
public BulkOperations bulkOps(BulkMode bulkMode, Class> entityClass) {
return bulkOps(bulkMode, entityClass, getCollectionName(entityClass));
}
@@ -754,6 +766,7 @@ public BulkOperations bulkOps(BulkMode bulkMode, Class> entityClass) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class, java.lang.String)
*/
+ @Override
public BulkOperations bulkOps(BulkMode mode, @Nullable Class> entityType, String collectionName) {
Assert.notNull(mode, "BulkMode must not be null!");
@@ -763,7 +776,6 @@ public BulkOperations bulkOps(BulkMode mode, @Nullable Class> entityType, Stri
new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper,
eventPublisher, entityCallbacks));
- operations.setExceptionTranslator(exceptionTranslator);
operations.setDefaultWriteConcern(writeConcern);
return operations;
@@ -981,7 +993,7 @@ public GeoResults geoNear(NearQuery near, Class> domainType, String col
for (Document element : results) {
GeoResult geoResult = callback.doWith(element);
- aggregate = aggregate.add(new BigDecimal(geoResult.getDistance().getValue()));
+ aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue()));
result.add(geoResult);
}
@@ -1116,6 +1128,7 @@ public long count(Query query, String collectionName) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
+ @Override
public long count(Query query, @Nullable Class> entityClass, String collectionName) {
Assert.notNull(query, "Query must not be null!");
@@ -1140,6 +1153,19 @@ protected long doCount(String collectionName, Document filter, CountOptions opti
collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options));
}
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.MongoOperations#estimatedCount(java.lang.String)
+ */
+ @Override
+ public long estimatedCount(String collectionName) {
+ return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions());
+ }
+
+ protected long doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) {
+ return execute(collectionName, collection -> collection.estimatedDocumentCount(options));
+ }
+
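A hedged sketch of how a subclass could hook into the new doEstimatedCount extension point, for example to time-box the server call; the customization itself is an assumption, not part of this change:

import java.util.concurrent.TimeUnit;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.client.model.EstimatedDocumentCountOptions;

class TimeBoxedEstimateTemplate extends MongoTemplate {

    TimeBoxedEstimateTemplate(MongoDatabaseFactory factory) {
        super(factory);
    }

    @Override
    protected long doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) {
        // cap the estimate at 500ms of server-side execution time
        return super.doEstimatedCount(collectionName, options.maxTime(500, TimeUnit.MILLISECONDS));
    }
}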
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object)
@@ -1168,17 +1194,34 @@ public T insert(T objectToSave, String collectionName) {
return (T) doInsert(collectionName, objectToSave, this.mongoConverter);
}
- protected void ensureNotIterable(@Nullable Object o) {
- if (o != null) {
- if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) {
- throw new IllegalArgumentException("Cannot use a collection here.");
- }
+ /**
+ * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
+ * {@link Iterator}.
+ *
+ * @param source can be {@literal null}.
+ * @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead.
+ */
+ protected void ensureNotIterable(@Nullable Object source) {
+ ensureNotCollectionLike(source);
+ }
+
+ /**
+ * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
+ * {@link Iterator}.
+ *
+ * @param source can be {@literal null}.
+ * @since 3.2.
+ */
+ protected void ensureNotCollectionLike(@Nullable Object source) {
+
+ if (EntityOperations.isCollectionLike(source)) {
+ throw new IllegalArgumentException("Cannot use a collection here.");
}
}
/**
* Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like
- * slaveOk() etc. Can be overridden in sub-classes.
+ * withCodecRegistry() etc. Can be overridden in sub-classes.
*
* @param collection
*/
@@ -1355,13 +1398,13 @@ public T save(T objectToSave, String collectionName) {
Assert.notNull(objectToSave, "Object to save must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
+ ensureNotCollectionLike(objectToSave);
AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService());
return source.isVersionedEntity() //
? doSaveVersioned(source, collectionName) //
: (T) doSave(collectionName, objectToSave, this.mongoConverter);
-
}
@SuppressWarnings("unchecked")
@@ -1826,7 +1869,7 @@ public List mapReduce(Query query, Class> domainType, String inputColle
Document mappedSort = getMappedSortObject(query, domainType);
if (mappedSort != null && !mappedSort.isEmpty()) {
- mapReduce = mapReduce.sort(getMappedSortObject(query, domainType));
+ mapReduce = mapReduce.sort(mappedSort);
}
mapReduce = mapReduce
@@ -1892,10 +1935,12 @@ public List mapReduce(Query query, Class> domainType, String inputColle
return mappedResults;
}
+ @Override
public GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass) {
return group(null, inputCollectionName, groupBy, entityClass);
}
+ @Override
public GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
Class entityClass) {
@@ -1969,9 +2014,7 @@ public AggregationResults aggregate(TypedAggregation> aggregation, Stri
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
- AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
- mappingContext, queryMapper);
- return aggregate(aggregation, inputCollectionName, outputType, context);
+ return aggregate(aggregation, inputCollectionName, outputType, null);
}
/* (non-Javadoc)
@@ -1981,7 +2024,7 @@ public AggregationResults aggregate(TypedAggregation> aggregation, Stri
public AggregationResults aggregate(Aggregation aggregation, Class> inputType, Class outputType) {
return aggregate(aggregation, getCollectionName(inputType), outputType,
- new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
+ queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext());
}
/* (non-Javadoc)
@@ -2088,9 +2131,13 @@ protected AggregationResults aggregate(Aggregation aggregation, String co
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
Assert.notNull(outputType, "Output type must not be null!");
- AggregationOperationContext contextToUse = new AggregationUtil(queryMapper, mappingContext)
- .prepareAggregationContext(aggregation, context);
- return doAggregate(aggregation, collectionName, outputType, contextToUse);
+ return doAggregate(aggregation, collectionName, outputType,
+ queryOperations.createAggregation(aggregation, context));
+ }
+
+ private <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
+ AggregationDefinition context) {
+ return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext());
}
@SuppressWarnings("ConstantConditions")
@@ -2125,7 +2172,7 @@ protected AggregationResults doAggregate(Aggregation aggregation, String
List rawResult = new ArrayList<>();
- Class> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType()
+ Class<?> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation<?>) aggregation).getInputType()
: null;
Optional collation = Optionals.firstNonEmpty(options::getCollation,
@@ -2141,11 +2188,23 @@ protected AggregationResults doAggregate(Aggregation aggregation, String
}
options.getComment().ifPresent(aggregateIterable::comment);
+ options.getHint().ifPresent(aggregateIterable::hint);
if (options.hasExecutionTimeLimit()) {
aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS);
}
+ if (options.isSkipResults()) {
+
+ // toCollection only allowed for $out and $merge if those are the last stages
+ if (aggregation.getPipeline().isOutOrMerge()) {
+ aggregateIterable.toCollection();
+ } else {
+ aggregateIterable.first();
+ }
+ return new AggregationResults<>(Collections.emptyList(), new Document());
+ }
+
MongoIterable iterable = aggregateIterable.map(val -> {
rawResult.add(val);
@@ -2166,11 +2225,10 @@ protected CloseableIterator aggregateStream(Aggregation aggregation, Stri
Assert.notNull(outputType, "Output type must not be null!");
Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!");
- AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
- AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context);
+ AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context);
AggregationOptions options = aggregation.getOptions();
- List pipeline = aggregationUtil.createPipeline(aggregation, rootContext);
+ List pipeline = aggregationDefinition.getAggregationPipeline();
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
@@ -2188,6 +2246,7 @@ protected CloseableIterator aggregateStream(Aggregation aggregation, Stri
}
options.getComment().ifPresent(cursor::comment);
+ options.getHint().ifPresent(cursor::hint);
Class> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType()
: null;
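A hedged sketch of the aggregation options now forwarded to the driver (comment and hint) by the code above; the hint(Document) builder method and the collection name are assumptions:

import org.bson.Document;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationHintExample {

    void adultsPerCity(MongoOperations operations) {

        Aggregation aggregation = Aggregation.newAggregation(
                Aggregation.match(Criteria.where("age").gte(18)),
                Aggregation.group("city").count().as("total"))
                .withOptions(AggregationOptions.builder()
                        .comment("adults-per-city")
                        .hint(new Document("age", 1)) // assumed hint(Document) option
                        .build());

        operations.aggregate(aggregation, "person", Document.class);
    }
}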
@@ -2288,6 +2347,7 @@ protected String replaceWithResourceIfNecessary(String function) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollectionNames()
*/
+ @Override
@SuppressWarnings("ConstantConditions")
public Set getCollectionNames() {
return execute(db -> {
@@ -2398,6 +2458,20 @@ protected MongoCollection doCreateCollection(String collectionName, Do
co.validationOptions(options);
}
+ if (collectionOptions.containsKey("timeseries")) {
+
+ Document timeSeries = collectionOptions.get("timeseries", Document.class);
+ com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(
+ timeSeries.getString("timeField"));
+ if (timeSeries.containsKey("metaField")) {
+ options.metaField(timeSeries.getString("metaField"));
+ }
+ if (timeSeries.containsKey("granularity")) {
+ options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase()));
+ }
+ co.timeSeriesOptions(options);
+ }
+
db.createCollection(collectionName, co);
MongoCollection coll = db.getCollection(collectionName, Document.class);
@@ -2552,6 +2626,19 @@ protected Document convertToDocument(@Nullable CollectionOptions collectionOptio
collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() //
.ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType))));
+
+ collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions)
+ .ifPresent(it -> {
+
+ Document timeseries = new Document("timeField", it.getTimeField());
+ if (StringUtils.hasText(it.getMetaField())) {
+ timeseries.append("metaField", it.getMetaField());
+ }
+ if (!Granularity.DEFAULT.equals(it.getGranularity())) {
+ timeseries.append("granularity", it.getGranularity().name().toLowerCase());
+ }
+ doc.put("timeseries", timeseries);
+ });
}
return doc;
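A hedged sketch of how the time-series options mapped above are typically populated, assuming the annotation-driven model introduced with this feature (the @TimeSeries attribute names are assumptions):

import java.time.Instant;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapping.TimeSeries;

class TimeSeriesExample {

    @TimeSeries(timeField = "timestamp", metaField = "sensorId")
    static class Measurement {
        Instant timestamp;
        String sensorId;
        double value;
    }

    void createTimeSeriesCollection(MongoOperations operations) {
        // derives CollectionOptions (including the "timeseries" document) from the annotated type
        operations.createCollection(Measurement.class);
    }
}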
@@ -2599,7 +2686,7 @@ Document getMappedValidator(Validator validator, Class> domainType) {
/**
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
* The first document that matches the query is returned and also removed from the collection in the database.
- *
+ *
* The query document is specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from
@@ -2715,25 +2802,24 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri
* Internal method using callbacks to do queries against the datastore that requires reading a single object from a
* collection of objects. It will take the following steps
*
- *
Execute the given {@link ConnectionCallback} for a {@link Document}.
+ *
Execute the given {@link CollectionCallback} for a {@link Document}.
*
Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.
*
*
* @param
* @param collectionCallback the callback to retrieve the {@link Document} with
- * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
+ * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
* @param collectionName the collection to be queried
* @return
*/
@Nullable
private T executeFindOneInternal(CollectionCallback collectionCallback,
- DocumentCallback objectCallback, String collectionName) {
+ DocumentCallback<T> documentCallback, String collectionName) {
try {
- T result = objectCallback
- .doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)));
- return result;
+ Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName));
+ return document != null ? documentCallback.doWith(document) : null;
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
}
@@ -2743,7 +2829,7 @@ private T executeFindOneInternal(CollectionCallback collectionCall
* Internal method using callback to do queries against the datastore that requires reading a collection of objects.
* It will take the following steps
*
- *
Execute the given {@link ConnectionCallback} for a {@link FindIterable}.
+ *
Execute the given {@link CollectionCallback} for a {@link FindIterable}.
*
Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
* {@link CursorPreparer} is {@literal null}
*
Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
@@ -2753,36 +2839,27 @@ private T executeFindOneInternal(CollectionCallback collectionCall
* @param
* @param collectionCallback the callback to retrieve the {@link FindIterable} with
* @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it
- * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
+ * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
* @param collectionName the collection to be queried
* @return
*/
private List executeFindMultiInternal(CollectionCallback> collectionCallback,
- CursorPreparer preparer, DocumentCallback objectCallback, String collectionName) {
+ CursorPreparer preparer, DocumentCallback<T> documentCallback, String collectionName) {
try {
- MongoCursor cursor = null;
-
- try {
-
- cursor = preparer
- .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
- .iterator();
+ try (MongoCursor<Document> cursor = preparer
+ .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
+ .iterator()) {
List result = new ArrayList<>();
while (cursor.hasNext()) {
Document object = cursor.next();
- result.add(objectCallback.doWith(object));
+ result.add(documentCallback.doWith(object));
}
return result;
- } finally {
-
- if (cursor != null) {
- cursor.close();
- }
}
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
@@ -2792,23 +2869,12 @@ private List executeFindMultiInternal(CollectionCallback> collectionCallback,
CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) {
- try {
-
- MongoCursor cursor = null;
+ try (MongoCursor<Document> cursor = preparer
+ .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
+ .iterator()) {
- try {
-
- cursor = preparer
- .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
- .iterator();
-
- while (cursor.hasNext()) {
- callbackHandler.processDocument(cursor.next());
- }
- } finally {
- if (cursor != null) {
- cursor.close();
- }
+ while (cursor.hasNext()) {
+ callbackHandler.processDocument(cursor.next());
}
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
@@ -2931,6 +2997,7 @@ public FindCallback(Document query, Document fields, @Nullable com.mongodb.clien
this.collation = collation;
}
+ @Override
public FindIterable doInCollection(MongoCollection collection)
throws MongoException, DataAccessException {
@@ -2950,12 +3017,17 @@ public FindIterable doInCollection(MongoCollection collectio
* @author Christoph Strobl
* @since 2.0
*/
- @RequiredArgsConstructor
private class ExistsCallback implements CollectionCallback {
private final Document mappedQuery;
private final com.mongodb.client.model.Collation collation;
+ ExistsCallback(Document mappedQuery, com.mongodb.client.model.Collation collation) {
+
+ this.mappedQuery = mappedQuery;
+ this.collation = collation;
+ }
+
@Override
public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException {
@@ -2977,7 +3049,7 @@ private static class FindAndRemoveCallback implements CollectionCallback collation;
- public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
+ FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
this.query = query;
this.fields = fields;
@@ -2985,6 +3057,7 @@ public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nu
this.collation = Optional.ofNullable(collation);
}
+ @Override
public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException {
FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields);
@@ -3003,8 +3076,9 @@ private static class FindAndModifyCallback implements CollectionCallback arrayFilters;
private final FindAndModifyOptions options;
- public FindAndModifyCallback(Document query, Document fields, Document sort, Object update,
- List arrayFilters, FindAndModifyOptions options) {
+ FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List arrayFilters,
+ FindAndModifyOptions options) {
+
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -3013,6 +3087,7 @@ public FindAndModifyCallback(Document query, Document fields, Document sort, Obj
this.options = options;
}
+ @Override
public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException {
FindOneAndUpdateOptions opts = new FindOneAndUpdateOptions();
@@ -3101,8 +3176,7 @@ public Document doInCollection(MongoCollection collection) throws Mong
interface DocumentCallback {
- @Nullable
- T doWith(@Nullable Document object);
+ T doWith(Document object);
}
/**
@@ -3113,29 +3187,33 @@ interface DocumentCallback {
* @author Christoph Strobl
* @author Roman Puchkovskiy
*/
- @RequiredArgsConstructor
private class ReadDocumentCallback implements DocumentCallback {
- private final @NonNull EntityReader super T, Bson> reader;
- private final @NonNull Class type;
+ private final EntityReader<? super T, Bson> reader;
+ private final Class<T> type;
private final String collectionName;
- @Nullable
- public T doWith(@Nullable Document document) {
+ ReadDocumentCallback(EntityReader<? super T, Bson> reader, Class<T> type, String collectionName) {
- T source = null;
+ this.reader = reader;
+ this.type = type;
+ this.collectionName = collectionName;
+ }
- if (document != null) {
- maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
- source = reader.read(type, document);
- }
+ @Override
+ public T doWith(Document document) {
+
+ maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
+ T entity = reader.read(type, document);
- if (source != null) {
- maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
- source = maybeCallAfterConvert(source, document, collectionName);
+ if (entity == null) {
+ throw new MappingException(String.format("EntityReader %s returned null", reader));
}
- return source;
+ maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
+ entity = maybeCallAfterConvert(entity, document, collectionName);
+
+ return entity;
}
}
@@ -3147,21 +3225,29 @@ public T doWith(@Nullable Document document) {
* @param
* @since 2.0
*/
- @RequiredArgsConstructor
private class ProjectingReadCallback implements DocumentCallback {
- private final @NonNull EntityReader reader;
- private final @NonNull Class entityType;
- private final @NonNull Class targetType;
- private final @NonNull String collectionName;
+ private final EntityReader<Object, Bson> reader;
+ private final Class<S> entityType;
+ private final Class<T> targetType;
+ private final String collectionName;
+
+ ProjectingReadCallback(EntityReader<Object, Bson> reader, Class<S> entityType, Class<T> targetType,
+ String collectionName) {
+
+ this.reader = reader;
+ this.entityType = entityType;
+ this.targetType = targetType;
+ this.collectionName = collectionName;
+ }
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document)
*/
+ @Override
@SuppressWarnings("unchecked")
- @Nullable
- public T doWith(@Nullable Document document) {
+ public T doWith(Document document) {
if (document == null) {
return null;
@@ -3172,15 +3258,16 @@ public T doWith(@Nullable Document document) {
maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));
- Object source = reader.read(typeToRead, document);
- Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
+ Object entity = reader.read(typeToRead, document);
- if (result != null) {
- maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
- result = maybeCallAfterConvert(result, document, collectionName);
+ if (entity == null) {
+ throw new MappingException(String.format("EntityReader %s returned null", reader));
}
- return (T) result;
+ Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity;
+
+ maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
+ return (T) maybeCallAfterConvert(result, document, collectionName);
}
}
@@ -3189,7 +3276,7 @@ class QueryCursorPreparer implements CursorPreparer {
private final Query query;
private final @Nullable Class> type;
- public QueryCursorPreparer(Query query, @Nullable Class> type) {
+ QueryCursorPreparer(Query query, @Nullable Class> type) {
this.query = query;
this.type = type;
@@ -3199,6 +3286,7 @@ public QueryCursorPreparer(Query query, @Nullable Class> type) {
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.DBCursor)
*/
+ @Override
public FindIterable prepare(FindIterable iterable) {
FindIterable cursorToUse = iterable;
@@ -3250,6 +3338,10 @@ public FindIterable prepare(FindIterable iterable) {
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
}
+ if (meta.getAllowDiskUse() != null) {
+ cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse());
+ }
+
for (Meta.CursorOption option : meta.getFlags()) {
switch (option) {
@@ -3260,6 +3352,7 @@ public FindIterable prepare(FindIterable iterable) {
case PARTIAL:
cursorToUse = cursorToUse.partial(true);
break;
+ case SECONDARY_READS:
case SLAVE_OK:
break;
default:
@@ -3277,7 +3370,8 @@ public FindIterable prepare(FindIterable iterable) {
@Override
public ReadPreference getReadPreference() {
- return query.getMeta().getFlags().contains(CursorOption.SLAVE_OK) ? ReadPreference.primaryPreferred() : null;
+ return (query.getMeta().getFlags().contains(CursorOption.SECONDARY_READS)
+ || query.getMeta().getFlags().contains(CursorOption.SLAVE_OK)) ? ReadPreference.primaryPreferred() : null;
}
}
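A hedged sketch of the query-level switches handled by the cursor preparer above; Query#allowDiskUse and the SECONDARY_READS cursor flag are assumed to be the matching public entry points:

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Meta.CursorOption;
import org.springframework.data.mongodb.core.query.Query;

class CursorMetaExample {

    Query adultsQuery() {

        Query query = new Query(Criteria.where("age").gte(18));
        query.allowDiskUse(true);                               // forwarded to FindIterable#allowDiskUse
        query.getMeta().addFlag(CursorOption.SECONDARY_READS);  // results in a primaryPreferred read preference
        return query;
    }
}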
@@ -3311,8 +3405,8 @@ static class GeoNearResultDocumentCallback implements DocumentCallback doWith(@Nullable Document object) {
+ @Override
+ public GeoResult<T> doWith(Document object) {
double distance = Double.NaN;
if (object.containsKey(distanceField)) {
@@ -3331,7 +3425,6 @@ public GeoResult doWith(@Nullable Document object) {
* @author Thomas Darimont
* @since 1.7
*/
- @AllArgsConstructor(access = AccessLevel.PACKAGE)
static class CloseableIterableCursorAdapter implements CloseableIterator {
private volatile @Nullable MongoCursor cursor;
@@ -3340,19 +3433,23 @@ static class CloseableIterableCursorAdapter implements CloseableIterator {
/**
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}.
- *
- * @param cursor
- * @param exceptionTranslator
- * @param objectReadCallback
*/
- public CloseableIterableCursorAdapter(MongoIterable cursor,
- PersistenceExceptionTranslator exceptionTranslator, DocumentCallback objectReadCallback) {
+ CloseableIterableCursorAdapter(MongoIterable<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
+ DocumentCallback<T> objectReadCallback) {
this.cursor = cursor.iterator();
this.exceptionTranslator = exceptionTranslator;
this.objectReadCallback = objectReadCallback;
}
+ CloseableIterableCursorAdapter(MongoCursor<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
+ DocumentCallback<T> objectReadCallback) {
+
+ this.cursor = cursor;
+ this.exceptionTranslator = exceptionTranslator;
+ this.objectReadCallback = objectReadCallback;
+ }
+
@Override
public boolean hasNext() {
@@ -3379,8 +3476,7 @@ public T next() {
try {
Document item = cursor.next();
- T converted = objectReadCallback.doWith(item);
- return converted;
+ return objectReadCallback.doWith(item);
} catch (RuntimeException ex) {
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
}
@@ -3406,14 +3502,27 @@ public void close() {
}
}
+ /**
+ * @deprecated since 3.1.4. Use {@link #getMongoDatabaseFactory()} instead.
+ * @return the {@link MongoDatabaseFactory} in use.
+ */
+ @Deprecated
public MongoDatabaseFactory getMongoDbFactory() {
+ return getMongoDatabaseFactory();
+ }
+
+ /**
+ * @return the {@link MongoDatabaseFactory} in use.
+ * @since 3.1.4
+ */
+ public MongoDatabaseFactory getMongoDatabaseFactory() {
return mongoDbFactory;
}
/**
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
* server through the driver API.
- *
+ *
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired
* target method matching the actual arguments plus a {@link ClientSession}.
*
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java
index 6afb8e9405..15295eb155 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018-2020 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,9 +15,6 @@
*/
package org.springframework.data.mongodb.core;
-import lombok.AccessLevel;
-import lombok.RequiredArgsConstructor;
-
import org.bson.Document;
import org.springframework.data.mapping.SimplePropertyHandler;
import org.springframework.data.mapping.context.MappingContext;
@@ -33,11 +30,14 @@
* @author Christoph Strobl
* @since 2.1
*/
-@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
class PropertyOperations {
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
+ PropertyOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
+ this.mappingContext = mappingContext;
+ }
+
/**
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java
index 438e53c5dc..38a921043e 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2020 the original author or authors.
+ * Copyright 2020-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -31,11 +32,17 @@
import org.springframework.data.mapping.PropertyReferenceException;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.CodecRegistryProvider;
+import org.springframework.data.mongodb.MongoExpression;
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
+import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
+import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
+import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
+import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
+import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
@@ -48,6 +55,7 @@
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.projection.ProjectionFactory;
+import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
@@ -194,9 +202,34 @@ DeleteContext deleteSingleContext(Query query) {
return new DeleteContext(query, false);
}
+ /**
+ * Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
+ *
+ * @param aggregation must not be {@literal null}.
+ * @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}.
+ * @return new instance of {@link AggregationDefinition}.
+ * @since 3.2
+ */
+ AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class> inputType) {
+ return new AggregationDefinition(aggregation, inputType);
+ }
+
+ /**
+ * Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
+ *
+ * @param aggregation must not be {@literal null}.
+ * @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}.
+ * @return new instance of {@link AggregationDefinition}.
+ * @since 3.2
+ */
+ AggregationDefinition createAggregation(Aggregation aggregation,
+ @Nullable AggregationOperationContext aggregationOperationContext) {
+ return new AggregationDefinition(aggregation, aggregationOperationContext);
+ }
+
/**
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
- * representation, mapping fieldnames, as well as determinging and applying {@link Collation collations}.
+ * representation, mapping field names, as well as determining and applying {@link Collation collations}.
*
* @author Christoph Strobl
*/
@@ -205,7 +238,7 @@ class QueryContext {
private final Query query;
/**
- * Create new a {@link QueryContext} instance from the given {@literal query} (can be eihter a {@link Query} or a
+ * Create a new {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
* plain {@link Document}.
*
* @param query can be {@literal null}.
@@ -258,7 +291,21 @@ Document getMappedQuery(@Nullable MongoPersistentEntity<?> entity) {
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
ProjectionFactory projectionFactory) {
- Document fields = query.getFieldsObject();
+ Document fields = new Document();
+
+ for (Entry<String, Object> entry : query.getFieldsObject().entrySet()) {
+
+ if (entry.getValue() instanceof MongoExpression) {
+
+ AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
+ : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
+
+ fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
+ } else {
+ fields.put(entry.getKey(), entry.getValue());
+ }
+ }
+
Document mappedFields = fields;
if (entity == null) {
@@ -275,7 +322,7 @@ Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> tar
mappingContext.getRequiredPersistentEntity(targetType));
}
- if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
+ if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
}
@@ -341,7 +388,8 @@ private DistinctQueryContext(@Nullable Object query, String fieldName) {
}
@Override
- Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
+ Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
+ ProjectionFactory projectionFactory) {
return getMappedFields(entity);
}
@@ -565,7 +613,7 @@ class UpdateContext extends QueryContext {
UpdateContext(MappedDocument update, boolean upsert) {
- super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
+ super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
this.multi = false;
this.upsert = upsert;
this.mappedDocument = update;
@@ -658,7 +706,8 @@ Document applyShardKey(MongoPersistentEntity<?> domainType, Document filter,
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
Document filterWithShardKey = new Document(filter);
- getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
+ getMappedShardKeyFields(domainType)
+ .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));
return filterWithShardKey;
}
@@ -707,10 +756,10 @@ Document getMappedShardKey(MongoPersistentEntity<?> entity) {
*/
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
- AggregationOperationContext context = domainType != null
- ? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
- : Aggregation.DEFAULT_CONTEXT;
+ Class<?> type = domainType != null ? domainType : Object.class;
+ AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext,
+ queryMapper);
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
}
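Note on getUpdatePipeline: the pipeline of an aggregation-style update is now always mapped against a RelaxedTypeBasedAggregationOperationContext, falling back to Object.class when no domain type is known, instead of the untyped DEFAULT_CONTEXT. A hedged sketch of the kind of update served by this path (Person and the visits field are illustrative):

    AggregationUpdate update = AggregationUpdate.update()
        .set("visits").toValue(ArithmeticOperators.valueOf("visits").add(1));

    template.updateFirst(Query.query(Criteria.where("id").is("person-id")), update, Person.class);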
@@ -760,4 +809,105 @@ boolean isMulti() {
return multi;
}
}
+
+ /**
+ * A value object that encapsulates common tasks required when running {@literal aggregations}.
+ *
+ * @since 3.2
+ */
+ class AggregationDefinition {
+
+ private final Aggregation aggregation;
+ private final Lazy<AggregationOperationContext> aggregationOperationContext;
+ private final Lazy<List<Document>> pipeline;
+ private final @Nullable Class<?> inputType;
+
+ /**
+ * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
+ * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
+ * the given {@literal aggregationOperationContext} if present.
+ * Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and
+ * the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
+ * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
+ *
+ * @param aggregation the source aggregation.
+ * @param aggregationOperationContext can be {@literal null}.
+ */
+ AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) {
+
+ this.aggregation = aggregation;
+
+ if (aggregation instanceof TypedAggregation) {
+ this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
+ } else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
+ this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
+ } else {
+ this.inputType = null;
+ }
+
+ this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext
+ : aggregationUtil.createAggregationContext(aggregation, getInputType()));
+ this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
+ }
+
+ /**
+ * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
+ * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
+ * the given {@literal aggregationOperationContext} if present.
+ * Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired
+ * {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
+ * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
+ *
+ * @param aggregation the source aggregation.
+ * @param inputType can be {@literal null}.
+ */
+ AggregationDefinition(Aggregation aggregation, @Nullable Class<?> inputType) {
+
+ this.aggregation = aggregation;
+
+ if (aggregation instanceof TypedAggregation) {
+ this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
+ } else {
+ this.inputType = inputType;
+ }
+
+ this.aggregationOperationContext = Lazy
+ .of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType()));
+ this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
+ }
+
+ /**
+ * Obtain the already mapped pipeline.
+ *
+ * @return never {@literal null}.
+ */
+ List<Document> getAggregationPipeline() {
+ return pipeline.get();
+ }
+
+ /**
+ * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}.
+ * @see AggregationPipeline#isOutOrMerge()
+ */
+ boolean isOutOrMerge() {
+ return aggregation.getPipeline().isOutOrMerge();
+ }
+
+ /**
+ * Obtain the {@link AggregationOperationContext} used for mapping the pipeline.
+ *
+ * @return never {@literal null}.
+ */
+ AggregationOperationContext getAggregationOperationContext() {
+ return aggregationOperationContext.get();
+ }
+
+ /**
+ * @return the input type to map the pipeline against. Can be {@literal null}.
+ */
+ @Nullable
+ Class<?> getInputType() {
+ return inputType;
+ }
+ }
}
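The AggregationDefinition value object added above is package-private plumbing consumed by MongoTemplate and ReactiveMongoTemplate: it derives the input type (from a TypedAggregation or a type-based context), maps the pipeline lazily on first access, and exposes whether the pipeline ends in $out/$merge. A rough sketch of the intended call pattern, assuming a QueryOperations instance named queryOperations (the surrounding code is not part of this hunk):

    AggregationDefinition definition = queryOperations.createAggregation(aggregation, Person.class);

    List<Document> pipeline = definition.getAggregationPipeline(); // mapped once, cached via Lazy
    if (definition.isOutOrMerge()) {
        // $out / $merge pipelines are not expected to return a result cursor
    }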
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java
index e2a4743a40..709f940b98 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java
index ce512b2615..01165bb996 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,10 +15,6 @@
*/
package org.springframework.data.mongodb.core;
-import lombok.AccessLevel;
-import lombok.NonNull;
-import lombok.RequiredArgsConstructor;
-import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Flux;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
@@ -62,15 +58,22 @@ public <T> ReactiveAggregation<T> aggregateAndReturn(Class<T> domainType) {
return new ReactiveAggregationSupport<>(template, domainType, null, null);
}
- @RequiredArgsConstructor
- @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
static class ReactiveAggregationSupport<T>
implements AggregationOperationWithAggregation<T>, ReactiveAggregation<T>, TerminatingAggregationOperation<T> {
- @NonNull ReactiveMongoTemplate template;
- @NonNull Class<T> domainType;
- Aggregation aggregation;
- String collection;
+ private final ReactiveMongoTemplate template;
+ private final Class<T> domainType;
+ private final Aggregation aggregation;
+ private final String collection;
+
+ ReactiveAggregationSupport(ReactiveMongoTemplate template, Class<T> domainType, Aggregation aggregation,
+ String collection) {
+
+ this.template = template;
+ this.domainType = domainType;
+ this.aggregation = aggregation;
+ this.collection = collection;
+ }
/*
* (non-Javadoc)
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java
index eab988c5ab..fe73abc9c6 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019-2020 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -71,7 +71,7 @@ interface TerminatingChangeStream {
/**
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
- *
+ *
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
*/
Flux<ChangeStreamEvent<T>> listen();
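As the javadoc above states, the returned Flux only completes when the Subscription is cancelled (or the watched collections/databases are dropped). A hedged usage sketch of the fluent change stream API (Person and the collection name are illustrative):

    Flux<ChangeStreamEvent<Person>> events = template.changeStream(Person.class)
        .watchCollection("people")
        .listen();

    Disposable subscription = events.subscribe(event -> System.out.println(event.getBody()));
    // cancelling the subscription completes the stream
    subscription.dispose();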
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java
index 691a6e256c..978e16622a 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019-2020 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java
index f59ed5e60e..4e9cd08694 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2020 the original author or authors.
+ * Copyright 2016-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java
index cc36e0358b..13160feaf2 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2020 the original author or authors.
+ * Copyright 2016-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java
index d9cdb3f257..e668ad4ed5 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -91,10 +91,10 @@ interface TerminatingFind {
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
* not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link org.reactivestreams.Subscription#cancel() canceled}.
- *