From 9612255bbca7416511be7ea0efffa4ad3179fc61 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 8 Dec 2016 13:54:21 +0100 Subject: [PATCH 1/5] DATAMONGO-1552 - Add $facet, $bucket and $bucketAuto aggregation stages. Prepare issue branch. --- pom.xml | 2 +- spring-data-mongodb-cross-store/pom.xml | 4 ++-- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb-log4j/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index ea80a3cb74..1e6526809f 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index ae0a5d6c8f..c417d0822a 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -6,7 +6,7 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT ../pom.xml @@ -48,7 +48,7 @@ org.springframework.data spring-data-mongodb - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 2d02722262..a173224695 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -13,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-log4j/pom.xml b/spring-data-mongodb-log4j/pom.xml index ee5e3336db..8247eff622 100644 --- a/spring-data-mongodb-log4j/pom.xml +++ b/spring-data-mongodb-log4j/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 8072d3f665..4614d94952 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 1.10.0.DATAMONGO-1552-SNAPSHOT ../pom.xml From 1ef6e5077c68c377ca0cd84a96540b83b9dae91e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 9 Dec 2016 09:23:59 +0100 Subject: [PATCH 2/5] DATAMONGO-1552 - Add $bucket aggregation stage. 
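
A minimal usage sketch of the new API, assuming documents that carry a numeric "price" and a string "title" field (illustrative names only, mirroring the tests added below):

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

    // Group documents by "price" into the buckets [0, 100) and [100, 200) and
    // collect everything outside these boundaries into the "other" bucket.
    Aggregation aggregation = newAggregation(
            bucket("price")
                    .withBoundaries(0, 100, 200)
                    .withDefaultBucket("other")
                    .andOutputCount().as("count")             // rendered as { $sum : 1 }
                    .andOutput("title").push().as("titles"));

This renders to a { $bucket : { groupBy: "$price", boundaries: [0, 100, 200], default: "other",
output: { count: { $sum: 1 }, titles: { $push: "$title" } } } } stage.
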
--- .../mongodb/core/aggregation/Aggregation.java | 22 +- .../core/aggregation/BucketOperation.java | 226 ++++++ .../aggregation/BucketOperationSupport.java | 697 ++++++++++++++++++ .../core/aggregation/AggregationTests.java | 56 ++ .../aggregation/BucketOperationUnitTests.java | 254 +++++++ 5 files changed, 1254 insertions(+), 1 deletion(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index ff9ec46d14..0b722b9f7f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -198,7 +198,7 @@ public static ProjectionOperation project(String... fields) { } /** - * Creates a new {@link ProjectionOperation} includeing the given {@link Fields}. + * Creates a new {@link ProjectionOperation} including the given {@link Fields}. * * @param fields must not be {@literal null}. * @return @@ -368,6 +368,26 @@ public static OutOperation out(String outCollectionName) { return new OutOperation(outCollectionName); } + /** + * Creates a new {@link BucketOperation} using given {@literal groupByField}. + * + * @param groupByField must not be {@literal null} or empty. + * @return + */ + public static BucketOperation bucket(String groupByField) { + return new BucketOperation(field(groupByField)); + } + + /** + * Creates a new {@link BucketOperation} using given {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + * @return + */ + public static BucketOperation bucket(AggregationExpression groupByExpression) { + return new BucketOperation(groupByExpression); + } + /** * Creates a new {@link LookupOperation}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java new file mode 100644 index 0000000000..080ece369e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -0,0 +1,226 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.core.aggregation.BucketOperation.BucketOperationOutputBuilder; +import org.springframework.util.Assert; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +/** + * Encapsulates the aggregation framework {@code $bucket}-operation. + *
+ * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into + * groups, called buckets, based on a specified expression and bucket boundaries. + *
+ * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of + * this class directly. + * + * @see http://docs.mongodb.org/manual/reference/aggregation/bucket/ + * @see BucketOperationSupport + * @author Mark Paluch + * @since 1.10 + */ +public class BucketOperation extends BucketOperationSupport + implements FieldsExposingAggregationOperation { + + private final List boundaries; + private final Object defaultBucket; + + /** + * Creates a new {@link BucketOperation} given a {@link Field group-by field}. + * + * @param groupByField must not be {@literal null}. + */ + public BucketOperation(Field groupByField) { + + super(groupByField); + + this.boundaries = Collections.emptyList(); + this.defaultBucket = null; + } + + /** + * Creates a new {@link BucketOperation} given a {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + */ + public BucketOperation(AggregationExpression groupByExpression) { + + super(groupByExpression); + + this.boundaries = Collections.emptyList(); + this.defaultBucket = null; + } + + private BucketOperation(BucketOperation bucketOperation, Outputs outputs) { + + super(bucketOperation, outputs); + + this.boundaries = bucketOperation.boundaries; + this.defaultBucket = bucketOperation.defaultBucket; + } + + private BucketOperation(BucketOperation bucketOperation, List boundaries, Object defaultBucket) { + + super(bucketOperation); + + this.boundaries = new ArrayList(boundaries); + this.defaultBucket = defaultBucket; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDBObject(AggregationOperationContext context) { + + DBObject options = new BasicDBObject(); + + options.put("boundaries", context.getMappedObject(new BasicDBObject("$set", boundaries)).get("$set")); + + if (defaultBucket != null) { + options.put("default", context.getMappedObject(new BasicDBObject("$set", defaultBucket)).get("$set")); + } + + options.putAll(super.toDBObject(context)); + + return new BasicDBObject("$bucket", options); + } + + /** + * Configures a default bucket {@literal literal} and return a new {@link BucketOperation}. + * + * @param literal must not be {@literal null}. + * @return + */ + public BucketOperation withDefaultBucket(Object literal) { + + Assert.notNull(literal, "Default bucket literal must not be null!"); + return new BucketOperation(this, boundaries, literal); + } + + /** + * Configures {@literal boundaries} and return a new {@link BucketOperation}. Existing {@literal boundaries} are + * preserved and the new {@literal boundaries} are appended. + * + * @param boundaries must not be {@literal null}. + * @return + */ + public BucketOperation withBoundaries(Object... 
boundaries) { + + Assert.notNull(boundaries, "Boundaries must not be null!"); + + List newBoundaries = new ArrayList(this.boundaries.size() + boundaries.length); + newBoundaries.addAll(this.boundaries); + newBoundaries.addAll(Arrays.asList(boundaries)); + + return new BucketOperation(this, newBoundaries, defaultBucket); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) + */ + @Override + protected BucketOperation newBucketOperation(Outputs outputs) { + return new BucketOperation(this, outputs); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) + */ + @Override + public ExpressionBucketOperationBuilder andOutputExpression(String expression, Object... params) { + return new ExpressionBucketOperationBuilder(expression, this, params); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) + */ + @Override + public BucketOperationOutputBuilder andOutput(AggregationExpression expression) { + return new BucketOperationOutputBuilder(expression, this); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) + */ + @Override + public BucketOperationOutputBuilder andOutput(String fieldName) { + return new BucketOperationOutputBuilder(Fields.field(fieldName), this); + } + + /** + * {@link OutputBuilder} implementation for {@link BucketOperation}. + */ + public static class BucketOperationOutputBuilder + extends BucketOperationSupport.OutputBuilder { + + /** + * Creates a new {@link BucketOperationOutputBuilder} fot the given value and {@link BucketOperation}. + * + * @param value must not be {@literal null}. + * @param operation must not be {@literal null}. + */ + protected BucketOperationOutputBuilder(Object value, BucketOperation operation) { + super(value, operation); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) + */ + @Override + protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { + return new BucketOperationOutputBuilder(operationOutput, this.operation); + } + } + + /** + * {@link ExpressionBucketOperationBuilderSupport} implementation for {@link BucketOperation} using SpEL expression + * based {@link Output}. + * + * @author Mark Paluch + */ + public static class ExpressionBucketOperationBuilder + extends ExpressionBucketOperationBuilderSupport { + + /** + * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperation} + * and parameters. + * + * @param expression must not be {@literal null}. + * @param operation must not be {@literal null}. 
+ * @param parameters + */ + protected ExpressionBucketOperationBuilder(String expression, BucketOperation operation, Object[] parameters) { + super(expression, operation, parameters); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) + */ + @Override + protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { + return new BucketOperationOutputBuilder(operationOutput, this.operation); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java new file mode 100644 index 0000000000..13a63e6bd1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java @@ -0,0 +1,697 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder; +import org.springframework.expression.spel.ast.Projection; +import org.springframework.util.Assert; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +/** + * Base class for bucket operations that support output expressions the aggregation framework. + *
+ * Bucket stages collect documents into buckets and can contribute output fields. + *
+ * Implementing classes are required to provide an {@link OutputBuilder}. + * + * @see http://docs.mongodb.org/manual/reference/aggregation/bucket/ + * @author Mark Paluch + * @since 1.10 + */ +public abstract class BucketOperationSupport, B extends OutputBuilder> + implements FieldsExposingAggregationOperation { + + private final Field groupByField; + private final AggregationExpression groupByExpression; + private final Outputs outputs; + + /** + * Creates a new {@link BucketOperationSupport} given a {@link Field group-by field}. + * + * @param groupByField must not be {@literal null}. + */ + protected BucketOperationSupport(Field groupByField) { + + Assert.notNull(groupByField, "Group by field must not be null!"); + + this.groupByField = groupByField; + this.groupByExpression = null; + this.outputs = Outputs.EMPTY; + + } + + /** + * Creates a new {@link BucketOperationSupport} given a {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + */ + protected BucketOperationSupport(AggregationExpression groupByExpression) { + + Assert.notNull(groupByExpression, "Group by AggregationExpression must not be null!"); + + this.groupByExpression = groupByExpression; + this.groupByField = null; + this.outputs = Outputs.EMPTY; + } + + /** + * Creates a copy of {@link BucketOperationSupport}. + * + * @param operationSupport must not be {@literal null}. + */ + protected BucketOperationSupport(BucketOperationSupport operationSupport) { + this(operationSupport, operationSupport.outputs); + } + + /** + * Creates a copy of {@link BucketOperationSupport} and applies the new {@link Outputs}. + * + * @param operationSupport must not be {@literal null}. + * @param outputs must not be {@literal null}. + */ + protected BucketOperationSupport(BucketOperationSupport operationSupport, Outputs outputs) { + + Assert.notNull(operationSupport, "BucketOperationSupport must not be null!"); + Assert.notNull(outputs, "Outputs must not be null!"); + + this.groupByField = operationSupport.groupByField; + this.groupByExpression = operationSupport.groupByExpression; + this.outputs = outputs; + } + + /** + * Creates a new {@link ExpressionBucketOperationBuilderSupport} given a SpEL {@literal expression} and optional + * {@literal params} to add an output field to the resulting bucket documents. + * + * @param expression the SpEL expression, must not be {@literal null} or empty. + * @param params must not be {@literal null} + * @return + */ + public abstract ExpressionBucketOperationBuilderSupport andOutputExpression(String expression, + Object... params); + + /** + * Creates a new {@link BucketOperationSupport} given an {@link AggregationExpression} to add an output field to the + * resulting bucket documents. + * + * @param expression the SpEL expression, must not be {@literal null} or empty. + * @return + */ + public abstract B andOutput(AggregationExpression expression); + + /** + * Creates a new {@link BucketOperationSupport} given {@literal fieldName} to add an output field to the resulting + * bucket documents. {@link BucketOperationSupport} exposes accumulation operations that can be applied to + * {@literal fieldName}. + * + * @param fieldName must not be {@literal null} or empty. + * @return + */ + public abstract B andOutput(String fieldName); + + /** + * Creates a new {@link BucketOperationSupport} given to add a count field to the resulting bucket documents. 
+ * + * @return + */ + public B andOutputCount() { + return andOutput(new AggregationExpression() { + @Override + public DBObject toDbObject(AggregationOperationContext context) { + return new BasicDBObject("$sum", 1); + } + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDBObject(AggregationOperationContext context) { + + DBObject dbObject = new BasicDBObject(); + + dbObject.put("groupBy", groupByExpression == null ? context.getReference(groupByField).toString() + : groupByExpression.toDbObject(context)); + + if (!outputs.isEmpty()) { + dbObject.put("output", outputs.toDbObject(context)); + } + + return dbObject; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() + */ + @Override + public ExposedFields getFields() { + return outputs.asExposedFields(); + } + + /** + * Implementation hook to create a new bucket operation. + * + * @param outputs the outputs + * @return the new bucket operation. + */ + protected abstract T newBucketOperation(Outputs outputs); + + protected T andOutput(Output output) { + return newBucketOperation(outputs.and(output)); + } + + /** + * Builder for SpEL expression-based {@link Output}. + * + * @author Mark Paluch + */ + public abstract static class ExpressionBucketOperationBuilderSupport, T extends BucketOperationSupport> + extends OutputBuilder { + + /** + * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperationSupport} + * and parameters. + * + * @param expression must not be {@literal null}. + * @param operation must not be {@literal null}. + * @param parameters + */ + protected ExpressionBucketOperationBuilderSupport(String expression, T operation, Object[] parameters) { + super(new SpelExpressionOutput(expression, parameters), operation); + } + } + + /** + * Base class for {@link Output} builders that result in a {@link BucketOperationSupport} providing the built + * {@link Output}. + * + * @author Mark Paluch + */ + public abstract static class OutputBuilder, T extends BucketOperationSupport> { + + protected final Object value; + protected final T operation; + + /** + * Creates a new {@link OutputBuilder} for the given value and {@link BucketOperationSupport}. + * + * @param value must not be {@literal null}. + * @param operation must not be {@literal null}. + */ + public OutputBuilder(Object value, T operation) { + + Assert.notNull(value, "Value must not be null or empty!"); + Assert.notNull(operation, "ProjectionOperation must not be null!"); + + this.value = value; + this.operation = operation; + } + + /** + * Generates a builder for a {@code $sum}-expression. + *
+ * Count expressions are emulated via {@code $sum: 1}. + *
+ * + * @return + */ + public B count() { + return sum(1); + } + + /** + * Generates a builder for a {@code $sum}-expression for the current value. + * + * @return + */ + public B sum() { + return apply(Accumulators.SUM); + } + + /** + * Generates a builder for a {@code $sum}-expression for the given {@literal value}. + * + * @param value + * @return + */ + public B sum(Number value) { + return apply(new OperationOutput(Accumulators.SUM.toString(), Collections.singleton(value))); + } + + /** + * Generates a builder for an {@code $last}-expression for the current value.. + * + * @return + */ + public B last() { + return apply(Accumulators.LAST); + } + + /** + * Generates a builder for a {@code $first}-expression the current value. + * + * @return + */ + public B first() { + return apply(Accumulators.FIRST); + } + + /** + * Generates a builder for an {@code $avg}-expression for the current value. + * + * @param reference + * @return + */ + public B avg() { + return apply(Accumulators.AVG); + } + + /** + * Generates a builder for an {@code $min}-expression for the current value. + * + * @return + */ + public B min() { + return apply(Accumulators.MIN); + } + + /** + * Generates a builder for an {@code $max}-expression for the current value. + * + * @return + */ + public B max() { + return apply(Accumulators.MAX); + } + + /** + * Generates a builder for an {@code $push}-expression for the current value. + * + * @return + */ + public B push() { + return apply(Accumulators.PUSH); + } + + /** + * Generates a builder for an {@code $addToSet}-expression for the current value. + * + * @return + */ + public B addToSet() { + return apply(Accumulators.ADDTOSET); + } + + /** + * Apply an operator to the current value. + * + * @param operation the operation name, must not be {@literal null} or empty. + * @param values must not be {@literal null}. + * @return + */ + public B apply(String operation, Object... values) { + + Assert.hasText(operation, "Operation must not be empty or null!"); + Assert.notNull(value, "Values must not be null!"); + + List objects = new ArrayList(values.length + 1); + objects.add(value); + objects.addAll(Arrays.asList(values)); + return apply(new OperationOutput(operation, objects)); + } + + /** + * Apply an {@link OperationOutput} to this output. + * + * @param operationOutput must not be {@literal null}. + * @return + */ + protected abstract B apply(OperationOutput operationOutput); + + private B apply(Accumulators operation) { + return this.apply(operation.toString()); + } + + /** + * Returns the finally to be applied {@link BucketOperation} with the given alias. + * + * @param alias will never be {@literal null} or empty. + * @return + */ + public T as(String alias) { + + if (value instanceof OperationOutput) { + return this.operation.andOutput(((OperationOutput) this.value).withAlias(alias)); + } + + if (value instanceof Field) { + throw new IllegalStateException("Cannot add a field as top-level output. 
Use accumulator expressions."); + } + + return this.operation + .andOutput(new AggregationExpressionOutput(Fields.field(alias), (AggregationExpression) value)); + } + } + + private enum Accumulators { + + SUM("$sum"), AVG("$avg"), FIRST("$first"), LAST("$last"), MAX("$max"), MIN("$min"), PUSH("$push"), ADDTOSET( + "$addToSet"); + + private String mongoOperator; + + Accumulators(String mongoOperator) { + this.mongoOperator = mongoOperator; + } + + /* (non-Javadoc) + * @see java.lang.Enum#toString() + */ + @Override + public String toString() { + return mongoOperator; + } + } + + /** + * Encapsulates {@link Output}s. + * + * @author Mark Paluch + */ + protected static class Outputs implements AggregationExpression { + + protected static final Outputs EMPTY = new Outputs(); + + private List outputs; + + /** + * Creates a new, empty {@link Outputs}. + */ + private Outputs() { + this.outputs = new ArrayList(); + } + + /** + * Creates new {@link Outputs} containing all given {@link Output}s. + * + * @param current + * @param output + */ + private Outputs(Collection current, Output output) { + + this.outputs = new ArrayList(current.size() + 1); + this.outputs.addAll(current); + this.outputs.add(output); + } + + /** + * @return the {@link ExposedFields} derived from {@link Output}. + */ + protected ExposedFields asExposedFields() { + + ExposedFields fields = ExposedFields.from(); + + for (Output output : outputs) { + fields = fields.and(output.getExposedField()); + } + + return fields; + } + + /** + * Create a new {@link Outputs} that contains the new {@link Output}. + * + * @param output must not be {@literal null}. + * @return the new {@link Outputs} that contains the new {@link Output} + */ + protected Outputs and(Output output) { + + Assert.notNull(output, "BucketOutput must not be null!"); + return new Outputs(this.outputs, output); + } + + /** + * @return {@literal true} if {@link Outputs} contains no {@link Output}. + */ + protected boolean isEmpty() { + return outputs.isEmpty(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDbObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDbObject(AggregationOperationContext context) { + + DBObject dbObject = new BasicDBObject(); + + for (Output output : outputs) { + dbObject.put(output.getExposedField().getName(), output.toDbObject(context)); + } + + return dbObject; + } + + } + + /** + * Encapsulates an output field in a bucket aggregation stage. + *
+ * Output fields can be either top-level fields that define a valid field name or nested output fields using + * operators. + * + * @author Mark Paluch + */ + protected abstract static class Output implements AggregationExpression { + + private final ExposedField field; + + /** + * Creates new {@link Projection} for the given {@link Field}. + * + * @param field must not be {@literal null}. + */ + protected Output(Field field) { + + Assert.notNull(field, "Field must not be null!"); + this.field = new ExposedField(field, true); + } + + /** + * Returns the field exposed by the {@link Output}. + * + * @return will never be {@literal null}. + */ + protected ExposedField getExposedField() { + return field; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDbObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public abstract DBObject toDbObject(AggregationOperationContext context); + } + + /** + * Output field that uses a Mongo operation (expression object) to generate an output field value. + *
+ * {@link OperationOutput} is used either with a regular field name or an operation keyword (e.g. + * {@literal $sum, $count}). + * + * @author Mark Paluch + */ + protected static class OperationOutput extends Output { + + private final String operation; + private final List values; + + /** + * Creates a new {@link Output} for the given field. + * + * @param operation the actual operation key, must not be {@literal null} or empty. + * @param values the values to pass into the operation, must not be {@literal null}. + */ + public OperationOutput(String operation, Collection values) { + + super(Fields.field(operation)); + + Assert.hasText(operation, "Operation must not be null or empty!"); + Assert.notNull(values, "Values must not be null!"); + + this.operation = operation; + this.values = new ArrayList(values); + } + + private OperationOutput(Field field, OperationOutput operationOutput) { + + super(field); + + this.operation = operationOutput.operation; + this.values = operationOutput.values; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDbObject(AggregationOperationContext context) { + + List operationArguments = getOperationArguments(context); + return new BasicDBObject(operation, + operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); + } + + protected List getOperationArguments(AggregationOperationContext context) { + + List result = new ArrayList(values != null ? values.size() : 1); + + for (Object element : values) { + + if (element instanceof Field) { + result.add(context.getReference((Field) element).toString()); + } else if (element instanceof Fields) { + for (Field field : (Fields) element) { + result.add(context.getReference(field).toString()); + } + } else if (element instanceof AggregationExpression) { + result.add(((AggregationExpression) element).toDbObject(context)); + } else { + result.add(element); + } + } + + return result; + } + + /** + * Returns the field that holds the {@link ProjectionOperationBuilder.OperationProjection}. + * + * @return + */ + protected Field getField() { + return getExposedField(); + } + + /** + * Creates a new instance of this {@link OperationOutput} with the given alias. + * + * @param alias the alias to set + * @return + */ + public OperationOutput withAlias(String alias) { + + final Field aliasedField = Fields.field(alias); + return new OperationOutput(aliasedField, this) { + + @Override + protected Field getField() { + return aliasedField; + } + + @Override + protected List getOperationArguments(AggregationOperationContext context) { + + // We have to make sure that we use the arguments from the "previous" OperationOutput that we replace + // with this new instance. + + return OperationOutput.this.getOperationArguments(context); + } + }; + } + } + + /** + * A {@link Output} based on a SpEL expression. + */ + static class SpelExpressionOutput extends Output { + + private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer(); + + private final String expression; + private final Object[] params; + + /** + * Creates a new {@link SpelExpressionOutput} for the given field, SpEL expression and parameters. + * + * @param expression must not be {@literal null} or empty. + * @param parameters must not be {@literal null}. 
+ */ + public SpelExpressionOutput(String expression, Object[] parameters) { + + super(Fields.field(expression)); + + Assert.hasText(expression, "Expression must not be null!"); + Assert.notNull(parameters, "Parameters must not be null!"); + + this.expression = expression; + this.params = parameters.clone(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDbObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDbObject(AggregationOperationContext context) { + return (DBObject) toMongoExpression(context, expression, params); + } + + protected static Object toMongoExpression(AggregationOperationContext context, String expression, Object[] params) { + return TRANSFORMER.transform(expression, context, params); + } + } + + /** + * @author Mark Paluch + */ + private static class AggregationExpressionOutput extends Output { + + private final AggregationExpression expression; + + /** + * Creates a new {@link AggregationExpressionOutput}. + * + * @param field + * @param expression + */ + protected AggregationExpressionOutput(Field field, AggregationExpression expression) { + + super(field); + + this.expression = expression; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDbObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDbObject(AggregationOperationContext context) { + return expression.toDbObject(context); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index d89d1a782a..ab40703c7c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -100,6 +100,7 @@ public class AggregationTests { private static final Version TWO_DOT_FOUR = new Version(2, 4); private static final Version TWO_DOT_SIX = new Version(2, 6); private static final Version THREE_DOT_TWO = new Version(3, 2); + private static final Version THREE_DOT_FOUR = new Version(3, 4); private static boolean initialized = false; @@ -145,6 +146,7 @@ private void cleanDb() { mongoTemplate.dropCollection(InventoryItem.class); mongoTemplate.dropCollection(Sales.class); mongoTemplate.dropCollection(Sales2.class); + mongoTemplate.dropCollection(Art.class); } /** @@ -1580,6 +1582,46 @@ public void letShouldBeAppliedCorrectly() { new BasicDBObjectBuilder().add("_id", "2").add("finalTotal", 10.25D).get())); } + /** + * @see DATAMONGO-1552 + */ + @Test + public void bucketShouldCollectDocumentsIntoABucket() { + + assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_FOUR)); + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + TypedAggregation aggregation = newAggregation(Art.class, // 
+ bucket("price") // + .withBoundaries(0, 100, 200) // + .withDefaultBucket("other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + .andOutputExpression("price * 10").sum().as("sum")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, DBObject.class); + assertThat(result.getMappedResults().size(), is(3)); + + // { "_id" : 0 , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + DBObject bound0 = result.getMappedResults().get(0); + assertThat(bound0, isBsonObject().containing("count", 1).containing("titles.[0]", "Dancer")); + assertThat((Double) bound0.get("sum"), is(closeTo(760.40, 0.1))); + + // { "_id" : 100 , "count" : 2 , "titles" : [ "The Pillars of Society" , "The Great Wave off Kanagawa"] , "sum" : + // 3672.9} + DBObject bound100 = result.getMappedResults().get(1); + assertThat(bound100, isBsonObject().containing("count", 2).containing("_id", 100)); + assertThat((List) bound100.get("titles"), + hasItems("The Pillars of Society", "The Great Wave off Kanagawa")); + assertThat((Double) bound100.get("sum"), is(closeTo(3672.9, 0.1))); + } + private void createUsersWithReferencedPersons() { mongoTemplate.dropCollection(User.class); @@ -1857,4 +1899,18 @@ static class Sales2 { Float tax; boolean applyDiscount; } + + /** + * @see DATAMONGO-1552 + */ + @lombok.Data + @Builder + static class Art { + + int id; + String title; + String artist; + Integer year; + double price; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java new file mode 100644 index 0000000000..f2ff2ba125 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java @@ -0,0 +1,254 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.hamcrest.core.Is.*; +import static org.junit.Assert.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.junit.Test; +import org.springframework.data.mongodb.core.aggregation.AggregationExpressions.ArithmeticOperators; + +import com.mongodb.DBObject; +import com.mongodb.util.JSON; + +/** + * Unit tests for {@link BucketOperation}. 
+ * + * @author Mark Paluch + */ +public class BucketOperationUnitTests { + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullFields() { + new BucketOperation((Field) null); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderBucketOutputExpressions() { + + BucketOperation operation = Aggregation.bucket("field") // + .andOutputExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // + .andOutput("title").push().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalStateException.class) + public void shouldRenderEmptyAggregationExpression() { + bucket("groupby").andOutput("field").as("alias"); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderBucketOutputOperators() { + + BucketOperation operation = Aggregation.bucket("field") // + .andOutputCount().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ titles : { $sum: 1 } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumAggregationExpression() { + + DBObject agg = bucket("field") // + .andOutput(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") // + .toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg, is(JSON.parse( + "{ $bucket: { groupBy: \"$field\", boundaries: [], output : { quizTotal: { $sum: \"$quizzes\"} } } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderDefault() { + + DBObject agg = bucket("field").withDefaultBucket("default bucket").toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg, + is(JSON.parse("{ $bucket: { groupBy: \"$field\", boundaries: [], default: \"default bucket\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderBoundaries() { + + DBObject agg = bucket("field") // + .withDefaultBucket("default bucket") // + .withBoundaries(0) // + .withBoundaries(10, 20).toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg, + is(JSON.parse("{ $bucket: { boundaries: [0, 10, 20], default: \"default bucket\", groupBy: \"$field\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").sum().as("cummulated_score"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ cummulated_score : { $sum: \"$score\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumWithValueOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").sum(4).as("cummulated_score"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ cummulated_score : { $sum: 4 } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderAvgOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").avg().as("average"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), 
is(JSON.parse("{ average : { $avg: \"$score\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderFirstOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").first().as("first_title"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ first_title : { $first: \"$title\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderLastOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").last().as("last_title"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ last_title : { $last: \"$title\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderMinOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").min().as("min_score"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ min_score : { $min: \"$score\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderPushOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").push().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ titles : { $push: \"$title\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderAddToSetOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").addToSet().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ titles : { $addToSet: \"$title\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumWithExpression() { + + BucketOperation operation = bucket("field") // + .andOutputExpression("netPrice + tax").sum().as("total"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ total : { $sum: { $add : [\"$netPrice\", \"$tax\"]} } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumWithOwnOutputExpression() { + + BucketOperation operation = bucket("field") // + .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), + is(JSON.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }"))); + } + + private static DBObject extractOutput(DBObject fromBucketClause) { + return (DBObject) ((DBObject) fromBucketClause.get("$bucket")).get("output"); + } +} From f22cbe20778c80108d3d61a66304480a056c7bea Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 9 Dec 2016 09:55:40 +0100 Subject: [PATCH 3/5] DATAMONGO-1552 - Add $bucketAuto aggregation stage. 
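
BucketAutoOperation distributes documents into a requested number of buckets with automatically
determined boundaries; an optional granularity snaps the boundary values to a preferred-number
series. A usage sketch, assuming the same numeric "price" and string "title" fields as in the
added tests (names are illustrative):

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
    import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities;

    // Let MongoDB derive the boundaries for four buckets over "price" and snap
    // the bucket edges to the Renard R5 preferred-number series.
    Aggregation aggregation = newAggregation(
            bucketAuto("price", 4)
                    .withGranularity(Granularities.R5)
                    .andOutputCount().as("count")
                    .andOutput("title").push().as("titles"));

This renders to a { $bucketAuto : { buckets: 4, granularity: "R5", groupBy: "$price",
output: { count: { $sum: 1 }, titles: { $push: "$title" } } } } stage.
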
--- .../mongodb/core/aggregation/Aggregation.java | 26 +- .../core/aggregation/BucketAutoOperation.java | 273 ++++++++++++++++++ .../aggregation/BucketOperationSupport.java | 7 +- .../core/aggregation/AggregationTests.java | 39 +++ .../BucketAutoOperationUnitTests.java | 142 +++++++++ .../aggregation/BucketOperationUnitTests.java | 18 +- 6 files changed, 500 insertions(+), 5 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 0b722b9f7f..2dc29fad75 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -369,7 +369,7 @@ public static OutOperation out(String outCollectionName) { } /** - * Creates a new {@link BucketOperation} using given {@literal groupByField}. + * Creates a new {@link BucketOperation} given {@literal groupByField}. * * @param groupByField must not be {@literal null} or empty. * @return @@ -379,7 +379,7 @@ public static BucketOperation bucket(String groupByField) { } /** - * Creates a new {@link BucketOperation} using given {@link AggregationExpression group-by expression}. + * Creates a new {@link BucketOperation} given {@link AggregationExpression group-by expression}. * * @param groupByExpression must not be {@literal null}. * @return @@ -388,6 +388,28 @@ public static BucketOperation bucket(AggregationExpression groupByExpression) { return new BucketOperation(groupByExpression); } + /** + * Creates a new {@link BucketAutoOperation} given {@literal groupByField}. + * + * @param groupByField must not be {@literal null} or empty. + * @param buckets number of buckets, must be a positive integer. + * @return + */ + public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { + return new BucketAutoOperation(field(groupByField), buckets); + } + + /** + * Creates a new {@link BucketAutoOperation} given {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + * @param buckets number of buckets, must be a positive integer. + * @return + */ + public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpression, int buckets) { + return new BucketAutoOperation(groupByExpression, buckets); + } + /** * Creates a new {@link LookupOperation}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java new file mode 100644 index 0000000000..9ad0d6f660 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -0,0 +1,273 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.BucketAutoOperationOutputBuilder; +import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; +import org.springframework.util.Assert; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +/** + * Encapsulates the aggregation framework {@code $bucketAuto}-operation. + *
+ * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into a + * specific number of groups, called buckets, based on a specified expression. Bucket boundaries are automatically + * determined in an attempt to evenly distribute the documents into the specified number of buckets. + *
+ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating instances of + * this class directly. + * + * @see http://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see BucketOperationSupport + * @author Mark Paluch + * @since 1.10 + */ +public class BucketAutoOperation extends BucketOperationSupport + implements FieldsExposingAggregationOperation { + + private final int buckets; + private final String granularity; + + /** + * Creates a new {@link BucketAutoOperation} given a {@link Field group-by field}. + * + * @param groupByField must not be {@literal null}. + * @param buckets number of buckets, must be a positive integer. + */ + public BucketAutoOperation(Field groupByField, int buckets) { + + super(groupByField); + + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + + this.buckets = buckets; + this.granularity = null; + } + + /** + * Creates a new {@link BucketAutoOperation} given a {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + * @param buckets number of buckets, must be a positive integer. + */ + public BucketAutoOperation(AggregationExpression groupByExpression, int buckets) { + + super(groupByExpression); + + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + + this.buckets = buckets; + this.granularity = null; + } + + private BucketAutoOperation(BucketAutoOperation bucketOperation, Outputs outputs) { + + super(bucketOperation, outputs); + + this.buckets = bucketOperation.buckets; + this.granularity = bucketOperation.granularity; + } + + private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, String granularity) { + + super(bucketOperation); + + this.buckets = buckets; + this.granularity = granularity; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDBObject(AggregationOperationContext context) { + + DBObject options = new BasicDBObject(); + + options.put("buckets", buckets); + + if (granularity != null) { + options.put("granularity", granularity); + } + + options.putAll(super.toDBObject(context)); + + return new BasicDBObject("$bucketAuto", options); + } + + /** + * Configures a number of bucket {@literal buckets} and return a new {@link BucketAutoOperation}. + * + * @param buckets must be a positive number. + * @return + */ + public BucketAutoOperation withBuckets(int buckets) { + + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + return new BucketAutoOperation(this, buckets, granularity); + } + + /** + * Configures {@literal granularity} that specifies the preferred number series to use to ensure that the calculated + * boundary edges end on preferred round numbers or their powers of 10 and return a new {@link BucketAutoOperation}. + * + * @param granularity must not be {@literal null}. 
+ * @return + */ + public BucketAutoOperation withGranularity(Granularity granularity) { + + Assert.notNull(granularity, "Granularity must not be null!"); + + return new BucketAutoOperation(this, buckets, granularity.toMongoGranularity()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) + */ + @Override + protected BucketAutoOperation newBucketOperation(Outputs outputs) { + return new BucketAutoOperation(this, outputs); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) + */ + @Override + public ExpressionBucketAutoOperationBuilder andOutputExpression(String expression, Object... params) { + return new ExpressionBucketAutoOperationBuilder(expression, this, params); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) + */ + @Override + public BucketAutoOperationOutputBuilder andOutput(AggregationExpression expression) { + return new BucketAutoOperationOutputBuilder(expression, this); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) + */ + @Override + public BucketAutoOperationOutputBuilder andOutput(String fieldName) { + return new BucketAutoOperationOutputBuilder(Fields.field(fieldName), this); + } + + /** + * {@link OutputBuilder} implementation for {@link BucketAutoOperation}. + */ + public static class BucketAutoOperationOutputBuilder + extends OutputBuilder { + + /** + * Creates a new {@link BucketAutoOperationOutputBuilder} fot the given value and {@link BucketAutoOperation}. + * + * @param value must not be {@literal null}. + * @param operation must not be {@literal null}. + */ + protected BucketAutoOperationOutputBuilder(Object value, BucketAutoOperation operation) { + super(value, operation); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) + */ + @Override + protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { + return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); + } + } + + /** + * {@link ExpressionBucketOperationBuilderSupport} implementation for {@link BucketAutoOperation} using SpEL + * expression based {@link Output}. + * + * @author Mark Paluch + */ + public static class ExpressionBucketAutoOperationBuilder + extends ExpressionBucketOperationBuilderSupport { + + /** + * Creates a new {@link ExpressionBucketAutoOperationBuilder} for the given value, {@link BucketAutoOperation} and + * parameters. + * + * @param expression must not be {@literal null}. + * @param operation must not be {@literal null}. 
+ * @param parameters + */ + protected ExpressionBucketAutoOperationBuilder(String expression, BucketAutoOperation operation, + Object[] parameters) { + super(expression, operation, parameters); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) + */ + @Override + protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { + return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); + } + } + + /** + * @author Mark Paluch + */ + public static interface Granularity { + + /** + * @return a String that represents a MongoDB granularity to be used with {@link BucketAutoOperation}. + */ + String toMongoGranularity(); + } + + /** + * Supported MongoDB granularities. + * + * @see https://en.wikipedia.org/wiki/Preferred_number + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity + * @author Mark Paluch + */ + public enum Granularities implements Granularity { + + R5, R10, R20, R40, R80, // + + SERIES_1_2_5("1-2-5"), // + + E6, E12, E24, E48, E96, E192, // + + POWERSOF2; + + final String granularity; + + Granularities() { + this.granularity = name(); + } + + Granularities(String granularity) { + this.granularity = granularity; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.GranularitytoMongoGranularity() + */ + @Override + public String toMongoGranularity() { + return granularity; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java index 13a63e6bd1..05a5bdb4cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java @@ -222,7 +222,7 @@ public abstract static class OutputBuilder, T exte * @param value must not be {@literal null}. * @param operation must not be {@literal null}. */ - public OutputBuilder(Object value, T operation) { + protected OutputBuilder(Object value, T operation) { Assert.notNull(value, "Value must not be null or empty!"); Assert.notNull(operation, "ProjectionOperation must not be null!"); @@ -433,6 +433,11 @@ private Outputs(Collection current, Output output) { */ protected ExposedFields asExposedFields() { + // The count field is included by default when the output is not specified. 
+ if (isEmpty()) { + return ExposedFields.from(new ExposedField("count", true)); + } + ExposedFields fields = ExposedFields.from(); for (Output output : outputs) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index ab40703c7c..c716491d22 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -61,7 +61,9 @@ import org.springframework.data.mongodb.core.aggregation.AggregationExpressions.ConditionalOperators; import org.springframework.data.mongodb.core.aggregation.AggregationExpressions.Let; import org.springframework.data.mongodb.core.aggregation.AggregationExpressions.Let.ExpressionVariable; +import org.springframework.data.mongodb.core.aggregation.AggregationExpressions.Multiply; import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry; +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.query.Criteria; @@ -1622,6 +1624,43 @@ public void bucketShouldCollectDocumentsIntoABucket() { assertThat((Double) bound100.get("sum"), is(closeTo(3672.9, 0.1))); } + /** + * @see DATAMONGO-1552 + */ + @Test + public void bucketAutoShouldCollectDocumentsIntoABucket() { + + assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_FOUR)); + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + TypedAggregation aggregation = newAggregation(Art.class, // + bucketAuto(Multiply.valueOf("price").multiplyBy(10), 3) // + .withGranularity(Granularities.E12) // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + .andOutputExpression("price * 10").sum().as("sum")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, DBObject.class); + assertThat(result.getMappedResults().size(), is(3)); + + // { "min" : 680.0 , "max" : 820.0 , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + DBObject bound0 = result.getMappedResults().get(0); + assertThat(bound0, isBsonObject().containing("count", 1).containing("titles.[0]", "Dancer").containing("min", 680.0) + .containing("max")); + + // { "min" : 820.0 , "max" : 1800.0 , "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : 1673.0} + DBObject bound1 = result.getMappedResults().get(1); + assertThat(bound1, isBsonObject().containing("count", 1).containing("min", 820.0)); + assertThat((List) bound1.get("titles"), hasItems("The Great Wave off Kanagawa")); + assertThat((Double) bound1.get("sum"), is(closeTo(1673.0, 0.1))); + } + private void createUsersWithReferencedPersons() { mongoTemplate.dropCollection(User.class); diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java new file mode 100644 index 0000000000..77388af89c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.hamcrest.core.Is.*; +import static org.junit.Assert.*; +import static org.springframework.data.mongodb.core.DBObjectTestUtils.getAsDBObject; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.junit.Test; +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; + +import com.mongodb.DBObject; +import com.mongodb.util.JSON; + +/** + * Unit tests for {@link BucketAutoOperation}. + * + * @author Mark Paluch + */ +public class BucketAutoOperationUnitTests { + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullFields() { + new BucketAutoOperation((Field) null, 0); + } + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNonPositiveIntegerNullFields() { + new BucketAutoOperation(Fields.field("field"), 0); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderBucketOutputExpressions() { + + BucketAutoOperation operation = Aggregation.bucketAuto("field", 5) // + .andOutputExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // + .andOutput("title").push().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalStateException.class) + public void shouldRenderEmptyAggregationExpression() { + bucket("groupby").andOutput("field").as("alias"); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderBucketOutputOperators() { + + BucketAutoOperation operation = Aggregation.bucketAuto("field", 5) // + .andOutputCount().as("titles"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ titles : { $sum: 1 } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderCorrectly() { + + DBObject agg = bucketAuto("field", 1).withBuckets(5).toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg, is(JSON.parse("{ $bucketAuto: { groupBy: \"$field\", buckets: 5 } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + 
public void shouldRenderGranulariy() { + + DBObject agg = bucketAuto("field", 1) // + .withGranularity(Granularities.E24) // + .toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg, is(JSON.parse("{ $bucketAuto: { buckets: 1, granularity: \"E24\", groupBy: \"$field\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumOperator() { + + BucketAutoOperation operation = bucketAuto("field", 5) // + .andOutput("score").sum().as("cummulated_score"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), is(JSON.parse("{ cummulated_score : { $sum: \"$score\" } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderSumWithOwnOutputExpression() { + + BucketAutoOperation operation = bucketAuto("field", 5) // + .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); + + DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject), + is(JSON.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }"))); + } + + private static DBObject extractOutput(DBObject fromBucketClause) { + return getAsDBObject(getAsDBObject(fromBucketClause, "$bucketAuto"), "output"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java index f2ff2ba125..5915c73b5d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java @@ -15,8 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.core.Is.*; +import static org.hamcrest.Matchers.*; +import static org.hamcrest.core.Is.is; import static org.junit.Assert.*; +import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import org.junit.Test; @@ -248,7 +250,19 @@ public void shouldRenderSumWithOwnOutputExpression() { is(JSON.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }"))); } + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldExposeDefaultCountField() { + + BucketOperation operation = bucket("field"); + + assertThat(operation.getFields().exposesSingleFieldOnly(), is(true)); + assertThat(operation.getFields().getField("count"), is(notNullValue())); + } + private static DBObject extractOutput(DBObject fromBucketClause) { - return (DBObject) ((DBObject) fromBucketClause.get("$bucket")).get("output"); + return getAsDBObject(getAsDBObject(fromBucketClause, "$bucket"), "output"); } } From bb59e4814ea76c50a07148cd6acdc88e6d40b786 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 9 Dec 2016 15:08:40 +0100 Subject: [PATCH 4/5] DATAMONGO-1552 - Add $facet aggregation stage. 
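
The $facet stage runs multiple aggregation sub-pipelines over the same set of input
documents within a single stage and exposes each sub-pipeline's result as an
array-valued field of the output document. A usage sketch based on the API introduced
in this patch (field and alias names are illustrative only, taken from the
accompanying tests):

    Aggregation aggregation = newAggregation(
            facet(match(Criteria.where("price").exists(true)),
                    bucket("price").withBoundaries(0, 150, 200, 300, 400)
                            .andOutputCount().as("count")).as("categorizedByPrice")
                    .and(bucketAuto("year", 5)).as("categorizedByYear"));

Note that, as of MongoDB 3.4, facet sub-pipelines may not contain nested $facet,
$out or $geoNear stages.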
--- .../mongodb/core/aggregation/Aggregation.java | 87 ++----- .../AggregationOperationRenderer.java | 107 ++++++++ .../core/aggregation/FacetOperation.java | 231 ++++++++++++++++++ .../core/aggregation/AggregationTests.java | 49 ++++ .../aggregation/AggregationUnitTests.java | 17 ++ .../aggregation/FacetOperationUnitTests.java | 117 +++++++++ 6 files changed, 545 insertions(+), 63 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 2dc29fad75..f438c08d68 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -17,17 +17,12 @@ import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; -import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; -import org.springframework.data.mongodb.core.aggregation.Fields.*; +import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; @@ -64,7 +59,7 @@ public class Aggregation { */ public static final String CURRENT = SystemVariable.CURRENT.toString(); - public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext(); + public static final AggregationOperationContext DEFAULT_CONTEXT = AggregationOperationRenderer.DEFAULT_CONTEXT; public static final AggregationOptions DEFAULT_OPTIONS = newAggregationOptions().build(); protected final List operations; @@ -410,6 +405,25 @@ public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpres return new BucketAutoOperation(groupByExpression, buckets); } + /** + * Creates a new {@link FacetOperation}. + * + * @return + */ + public static FacetOperation facet() { + return FacetOperation.EMPTY; + } + + /** + * Creates a new {@link FacetOperationBuilder} given {@link Aggregation}. + * + * @param aggregationOperations the sub-pipeline, must not be {@literal null}. + * @return + */ + public static FacetOperationBuilder facet(AggregationOperation... aggregationOperations) { + return facet().and(aggregationOperations); + } + /** * Creates a new {@link LookupOperation}. 
* @@ -491,24 +505,7 @@ public static AggregationOptions.Builder newAggregationOptions() { */ public DBObject toDbObject(String inputCollectionName, AggregationOperationContext rootContext) { - AggregationOperationContext context = rootContext; - List operationDocuments = new ArrayList(operations.size()); - - for (AggregationOperation operation : operations) { - - operationDocuments.add(operation.toDBObject(context)); - - if (operation instanceof FieldsExposingAggregationOperation) { - - FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; - - if (operation instanceof InheritsFieldsAggregationOperation) { - context = new InheritingExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context); - } else { - context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context); - } - } - } + List operationDocuments = AggregationOperationRenderer.toDBObject(operations, rootContext); DBObject command = new BasicDBObject("aggregate", inputCollectionName); command.put("pipeline", operationDocuments); @@ -524,43 +521,7 @@ public DBObject toDbObject(String inputCollectionName, AggregationOperationConte */ @Override public String toString() { - return SerializationUtils - .serializeToJsonSafely(toDbObject("__collection__", new NoOpAggregationOperationContext())); - } - - /** - * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. - * - * @author Oliver Gierke - */ - private static class NoOpAggregationOperationContext implements AggregationOperationContext { - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject) - */ - @Override - public DBObject getMappedObject(DBObject dbObject) { - return dbObject; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ - @Override - public FieldReference getReference(Field field) { - return new DirectFieldReference(new ExposedField(field, true)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ - @Override - public FieldReference getReference(String name) { - return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); - } + return SerializationUtils.serializeToJsonSafely(toDbObject("__collection__", DEFAULT_CONTEXT)); } /** @@ -600,7 +561,7 @@ public static boolean isReferingToSystemVariable(String fieldRef) { return false; } - /* + /* * (non-Javadoc) * @see java.lang.Enum#toString() */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java new file mode 100644 index 0000000000..1e4c73e439 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java @@ -0,0 +1,107 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; + +import com.mongodb.DBObject; + +/** + * Rendering support for {@link AggregationOperation} into a {@link List} of {@link com.mongodb.DBObject}. + * + * @author Mark Paluch + * @since 1.10 + */ +class AggregationOperationRenderer { + + static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext(); + + /** + * Render a {@link List} of {@link AggregationOperation} given {@link AggregationOperationContext} into their + * {@link DBObject} representation. + * + * @param operations must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the {@link List} of {@link DBObject}. + */ + static List toDBObject(List operations, AggregationOperationContext rootContext) { + + List operationDocuments = new ArrayList(operations.size()); + + AggregationOperationContext contextToUse = rootContext; + + for (AggregationOperation operation : operations) { + + operationDocuments.add(operation.toDBObject(contextToUse)); + + if (operation instanceof FieldsExposingAggregationOperation) { + + FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; + + if (operation instanceof InheritsFieldsAggregationOperation) { + contextToUse = new InheritingExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), + contextToUse); + } else { + contextToUse = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse); + } + } + } + + return operationDocuments; + } + + /** + * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. 
+ * + * @author Oliver Gierke + */ + private static class NoOpAggregationOperationContext implements AggregationOperationContext { + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject) + */ + @Override + public DBObject getMappedObject(DBObject dbObject) { + return dbObject; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) + */ + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) + */ + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java new file mode 100644 index 0000000000..9082acedf0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java @@ -0,0 +1,231 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.util.Assert; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +/** + * Encapsulates the aggregation framework {@code $facet}-operation. + *

+ * Facet of {@link AggregationOperation}s to be used in an {@link Aggregation}. Processes multiple + * {@link AggregationOperation} pipelines within a single stage on the same set of input documents. Each sub-pipeline + * has its own field in the output document where its results are stored as an array of documents. + * {@link FacetOperation} enables various aggregations on the same set of input documents, without needing to retrieve + * the input documents multiple times. + *

+ * As of MongoDB 3.4, {@link FacetOperation} cannot be used with nested pipelines containing {@link GeoNearOperation}, + * {@link OutOperation} and {@link FacetOperation}. + *

+ * We recommend to use the static factory method {@link Aggregation#facet()} instead of creating instances of this class + * directly. + * + * @see http://docs.mongodb.org/manual/reference/aggregation/facet/ + * @author Mark Paluch + * @since 1.10 + */ +public class FacetOperation implements FieldsExposingAggregationOperation { + + /** + * Empty (initial) {@link FacetOperation}. + */ + public static final FacetOperation EMPTY = new FacetOperation(); + + private final Facets facets; + + /** + * Creates a new {@link FacetOperation}. + */ + public FacetOperation() { + this(Facets.EMPTY); + } + + private FacetOperation(Facets facets) { + this.facets = facets; + } + + /** + * Creates a new {@link FacetOperationBuilder} to append a new facet using {@literal operations}. + *

+ * {@link FacetOperationBuilder} takes a pipeline of {@link AggregationOperation} to categorize documents into a + * single facet. + * + * @param operations must not be {@literal null} or empty. + * @return + */ + public FacetOperationBuilder and(AggregationOperation... operations) { + + Assert.notNull(operations, "AggregationOperations must not be null!"); + Assert.notEmpty(operations, "AggregationOperations must not be empty!"); + + return new FacetOperationBuilder(facets, Arrays.asList(operations)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + */ + @Override + public DBObject toDBObject(AggregationOperationContext context) { + return new BasicDBObject("$facet", facets.toDBObject(context)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() + */ + @Override + public ExposedFields getFields() { + return facets.asExposedFields(); + } + + /** + * Builder for {@link FacetOperation} by adding existing and the new pipeline of {@link AggregationOperation} to the + * new {@link FacetOperation}. + * + * @author Mark Paluch + */ + public static class FacetOperationBuilder { + + private final Facets current; + private final List operations; + + private FacetOperationBuilder(Facets current, List operations) { + this.current = current; + this.operations = operations; + } + + /** + * Creates a new {@link FacetOperation} that contains the configured pipeline of {@link AggregationOperation} + * exposed as {@literal fieldName} in the resulting facet document. + * + * @param fieldName must not be {@literal null} or empty. + * @return + */ + public FacetOperation as(String fieldName) { + + Assert.hasText(fieldName, "FieldName must not be null or empty!"); + + return new FacetOperation(current.and(fieldName, operations)); + } + } + + /** + * Encapsulates multiple {@link Facet}s + * + * @author Mark Paluch + */ + private static class Facets { + + private static final Facets EMPTY = new Facets(Collections. emptyList()); + + private List facets; + + /** + * Creates a new {@link Facets} given {@link List} of {@link Facet}. + * + * @param facets + */ + private Facets(List facets) { + this.facets = facets; + } + + /** + * @return the {@link ExposedFields} derived from {@link Output}. + */ + protected ExposedFields asExposedFields() { + + ExposedFields fields = ExposedFields.from(); + + for (Facet facet : facets) { + fields = fields.and(facet.getExposedField()); + } + + return fields; + } + + protected DBObject toDBObject(AggregationOperationContext context) { + + DBObject dbObject = new BasicDBObject(facets.size()); + + for (Facet facet : facets) { + dbObject.put(facet.getExposedField().getName(), facet.toDBObjects(context)); + } + + return dbObject; + } + + /** + * Adds a facet to this {@link Facets}. + * + * @param fieldName must not be {@literal null}. + * @param operations must not be {@literal null}. + * @return the new {@link Facets}. 
+ */ + public Facets and(String fieldName, List operations) { + + Assert.hasText(fieldName, "FieldName must not be null or empty!"); + Assert.notNull(operations, "AggregationOperations must not be null!"); + + List facets = new ArrayList(this.facets.size() + 1); + facets.addAll(this.facets); + facets.add(new Facet(new ExposedField(fieldName, true), operations)); + + return new Facets(facets); + } + } + + /** + * A single facet with a {@link ExposedField} and its {@link AggregationOperation} pipeline. + * + * @author Mark Paluch + */ + private static class Facet { + + private final ExposedField exposedField; + private final List operations; + + /** + * Creates a new {@link Facet} given {@link ExposedField} and {@link AggregationOperation} pipeline. + * + * @param exposedField must not be {@literal null}. + * @param operations must not be {@literal null}. + */ + protected Facet(ExposedField exposedField, List operations) { + + Assert.notNull(exposedField, "ExposedField must not be null!"); + Assert.notNull(operations, "AggregationOperations must not be null!"); + + this.exposedField = exposedField; + this.operations = operations; + } + + protected ExposedField getExposedField() { + return exposedField; + } + + protected List toDBObjects(AggregationOperationContext context) { + return AggregationOperationRenderer.toDBObject(operations, context); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index c716491d22..8087d596ba 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -74,6 +74,7 @@ import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.CommandResult; @@ -1661,6 +1662,54 @@ public void bucketAutoShouldCollectDocumentsIntoABucket() { assertThat((Double) bound1.get("sum"), is(closeTo(1673.0, 0.1))); } + /** + * @see DATAMONGO-1552 + */ + @Test + public void facetShouldCreateFacets() { + + assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_FOUR)); + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + BucketAutoOperation bucketPrice = bucketAuto(Multiply.valueOf("price").multiplyBy(10), 3) // + .withGranularity(Granularities.E12) // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + .andOutputExpression("price * 10") // + .sum().as("sum"); + + TypedAggregation aggregation = newAggregation(Art.class, // + project("title", "artist", "year", "price"), // + facet(bucketPrice).as("categorizeByPrice") // + .and(bucketAuto("year", 3)).as("categorizeByYear")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, DBObject.class); + 
assertThat(result.getMappedResults().size(), is(1)); + + DBObject mappedResult = result.getUniqueMappedResult(); + + // [ { "_id" : { "min" : 680.0 , "max" : 820.0} , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + // , + // { "_id" : { "min" : 820.0 , "max" : 1800.0} , "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : + // 1673.0} , + // { "_id" : { "min" : 1800.0 , "max" : 3300.0} , "count" : 2 , "titles" : [ "The Pillars of Society" , "Melancholy + // III"] , "sum" : 4799.9}] + BasicDBList categorizeByPrice = (BasicDBList) mappedResult.get("categorizeByPrice"); + assertThat(categorizeByPrice, hasSize(3)); + + // [ { "_id" : { "min" : null , "max" : 1902} , "count" : 1} , + // { "_id" : { "min" : 1902 , "max" : 1925} , "count" : 1} , + // { "_id" : { "min" : 1925 , "max" : 1926} , "count" : 2}] + BasicDBList categorizeByYear = (BasicDBList) mappedResult.get("categorizeByYear"); + assertThat(categorizeByYear, hasSize(3)); + } + private void createUsersWithReferencedPersons() { mongoTemplate.dropCollection(User.class); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java index 684bda8326..88016dddb7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java @@ -563,6 +563,23 @@ public void shouldRenderProjectionIfNullWithFallbackFieldReferenceCorrectly() { isBsonObject().containing("$ifNull", Arrays.asList("$chroma", "$fallback"))); } + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldHonorDefaultCountField() { + + DBObject agg = Aggregation + .newAggregation(// + bucket("year"), // + project("count")) // + .toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + + DBObject project = extractPipelineElement(agg, 1, "$project"); + + assertThat(project, isBsonObject().containing("count", 1)); + } + private DBObject extractPipelineElement(DBObject agg, int index, String operation) { List pipeline = (List) agg.get("pipeline"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java new file mode 100644 index 0000000000..ee40128d57 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java @@ -0,0 +1,117 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.hamcrest.MatcherAssert.*; +import static org.hamcrest.Matchers.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.junit.Test; +import org.springframework.data.mongodb.core.query.Criteria; + +import com.mongodb.DBObject; +import com.mongodb.util.JSON; + +/** + * Unit tests for {@link FacetOperation}. + * + * @author Mark Paluch + * @soundtrack Stanley Foort - You Make Me Believe In Magic (Extended Mix) + */ +public class FacetOperationUnitTests { + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderCorrectly() throws Exception { + + FacetOperation facetOperation = new FacetOperation() + .and(match(Criteria.where("price").exists(true)), // + bucket("price") // + .withBoundaries(0, 150, 200, 300, 400) // + .withDefaultBucket("Other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles")) // + .as("categorizedByPrice") // + .and(bucketAuto("year", 5)).as("categorizedByYears"); + + DBObject dbObject = facetOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject, + is(JSON.parse("{ $facet: { categorizedByPrice: [" + "{ $match: { price: { $exists: true } } }, " + + "{ $bucket: { boundaries: [ 0, 150, 200, 300, 400 ], groupBy: \"$price\", default: \"Other\", " + + "output: { count: { $sum: 1 }, titles: { $push: \"$title\" } } } } ]," + + "categorizedByYears: [ { $bucketAuto: { buckets: 5, groupBy: \"$year\" } } ] } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldRenderEmpty() throws Exception { + + FacetOperation facetOperation = facet(); + + DBObject dbObject = facetOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject, is(JSON.parse("{ $facet: { } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test(expected = IllegalArgumentException.class) + public void shouldRejectNonExistingFields() throws Exception { + + FacetOperation facetOperation = new FacetOperation() + .and(project("price"), // + bucket("price") // + .withBoundaries(0, 150, 200, 300, 400) // + .withDefaultBucket("Other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles")) // + .as("categorizedByPrice"); + + DBObject dbObject = facetOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject, + is(JSON.parse("{ $facet: { categorizedByPrice: [" + "{ $match: { price: { $exists: true } } }, " + + "{ $bucket: {boundaries: [ 0, 150, 200, 300, 400 ], groupBy: \"$price\", default: \"Other\", " + + "output: { count: { $sum: 1 }, titles: { $push: \"$title\" } } } } ]," + + "categorizedByYears: [ { $bucketAuto: { buckets: 5, groupBy: \"$year\" } } ] } }"))); + } + + /** + * @see DATAMONGO-1552 + */ + @Test + public void shouldHonorProjectedFields() { + + FacetOperation facetOperation = new FacetOperation() + .and(project("price").and("title").as("name"), // + bucketAuto("price", 5) // + .andOutput("name").push().as("titles")) // + .as("categorizedByPrice"); + + DBObject dbObject = facetOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject, + is(JSON.parse("{ $facet: { categorizedByPrice: [" + "{ $project: { price: 1, name: \"$title\" } }, " + + "{ $bucketAuto: { buckets: 5, groupBy: \"$price\", " + + "output: { titles: { $push: \"$name\" } } } } ] } }"))); + } +} From bf376e4b64edf5dc83c2bda0ee60eb15a47111ca Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 9 Dec 2016 15:08:56 +0100 Subject: [PATCH 5/5] DATAMONGO-1552 - Documentation. 
---
 src/main/asciidoc/new-features.adoc      |  1 +
 src/main/asciidoc/reference/mongodb.adoc | 82 +++++++++++++++++++++++-
 2 files changed, 81 insertions(+), 2 deletions(-)

diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc
index 8066c6a086..dad476b2eb 100644
--- a/src/main/asciidoc/new-features.adoc
+++ b/src/main/asciidoc/new-features.adoc
@@ -5,6 +5,7 @@ == What's new in Spring Data MongoDB 1.10
 * Support for `$min`, `$max` and `$slice` operators via `Update`.
 * Support for `$cond` and `$ifNull` operators via `Aggregation`.
+* Multi-faceted aggregations using `$facet`, `$bucket` and `$bucketAuto` via `Aggregation`.
 
 [[new-features.1-9-0]]
 == What's new in Spring Data MongoDB 1.9
diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc
index 0b9b03fc0c..be7ce87d76 100644
--- a/src/main/asciidoc/reference/mongodb.adoc
+++ b/src/main/asciidoc/reference/mongodb.adoc
@@ -1674,7 +1674,7 @@ At the time of this writing we provide support for the following Aggregation Ope
 [cols="2*"]
 |===
 | Pipeline Aggregation Operators
-| project, skip, limit, lookup, unwind, group, sort, geoNear
+| project, skip, limit, lookup, unwind, group, sort, geoNear, facet, bucket, bucketAuto
 
 | Set Aggregation Operators
 | setEquals, setIntersection, setUnion, setDifference, setIsSubset, anyElementTrue, allElementsTrue
@@ -1733,10 +1733,88 @@ Note that more examples for project operations can be found in the `AggregationT
 
 Note that further details regarding the projection expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
 
+[[mongo.aggregation.facet]]
+=== Faceted classification
+
+As of version 3.4, MongoDB supports faceted classification using the Aggregation Framework. A faceted classification uses semantic categories, either general or subject-specific, that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.
+
+==== Buckets
+
+Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or grouping expression. They can be defined via the `bucket()`/`bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. The bucket operation can be extended with additional parameters through a fluent API via the `with…()` methods and the `andOutput(String)` method, and aliased via the `as(String)` method. Each bucket is represented as a document in the output.
+
+`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted.
+
+.Bucket operation examples
+====
+[source,java]
+----
+// will generate {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
+bucket("price").withBoundaries(0, 100, 400);
+
+// will generate {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}}
+bucket("price").withBoundaries(0, 100).withDefaultBucket("Other");
+
+// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
+bucket("price").withBoundaries(0, 100).andOutputCount().as("count");
+
+// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}}
+bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
+----
+====
+
+`BucketAutoOperation` determines boundaries itself in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or their powers of 10.
+
+.Bucket auto operation examples
+====
+[source,java]
+----
+// will generate {$bucketAuto: {groupBy: $price, buckets: 5}}
+bucketAuto("price", 5);
+
+// will generate {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
+bucketAuto("price", 5).withGranularity(Granularities.E24);
+
+// will generate {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}}
+bucketAuto("price", 5).andOutput("title").push().as("titles");
+----
+====
+
+Bucket operations can use `AggregationExpression` via `andOutput()` and <<mongo.aggregation.projection.expressions,SpEL expressions>> via `andOutputExpression()` to create output fields in buckets.
+
+Note that further details regarding bucket expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
+http://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.
+
+==== Multi-faceted aggregation
+
+Multiple aggregation pipelines can be used to create multi-faceted aggregations which characterize data across multiple dimensions, or facets, within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, etc.
+
+A `FacetOperation` can be defined via the `facet()` method of the `Aggregation` class. It can be customized with multiple aggregation pipelines via the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.
+
+Sub-pipelines can project and filter input documents prior to grouping. Common use cases are the extraction of date parts or calculations before categorization.
+
+.Facet operation examples
+====
+[source,java]
+----
+// will generate {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
+facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice");
+
+// will generate {$facet: {categorizedByYear: [
+//   { $project: { title: 1, publicationYear: { $year: "$publicationDate"}}},
+//   { $bucketAuto: {groupBy: $publicationYear, buckets: 5, output: { titles: {$push:"$title"}}}}
+// ]}}
+facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
+      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
+    .as("categorizedByYear");
+----
+====
+
+Note that further details regarding the facet operation can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.
+
 [[mongo.aggregation.projection.expressions]]
 ==== Spring Expression Support in Projection Expressions
 
-As of Version 1.4.0 we support the use of SpEL expression in projection expressions via the `andExpression` method of the `ProjectionOperation` class. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations.
+We support the use of SpEL expressions in projection expressions via the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations.
 
 ===== Complex calculations with SpEL expressions
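+
+SpEL output expressions also work for bucket outputs via `andOutputExpression(…)`. The following sketch (field names such as `netPrice`, `surCharge` and `taxrate` are illustrative only) shows a parameterized SpEL expression used to compute a bucket output field:
+
+[source,java]
+----
+// [0] refers to the first externally bound parameter, here the value 2; this renders
+// { grossSalesPrice: { $multiply: [ { $add: [ "$netPrice", "$surCharge" ] }, "$taxrate", 2 ] } }
+bucket("price").withBoundaries(0, 150)
+    .andOutputExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice");
+----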