
DATAMONGO-1666 - Consider collection type in bulk DBRef fetching. #457

Closed · wants to merge 3 commits
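Apart from the feature-branch version bump in the POMs and a round of lambda and diamond-operator cleanup, the substantive change sits in `MappingMongoConverter.readCollectionOrArray`: bulk-fetched `DBRef`s used to come back as a plain `List` regardless of the collection type the target property declares, while the patched code routes the converted objects through `getPotentiallyConvertedSimpleRead(objects, targetType.getType())` so they can be adapted to the declared type. Below is a minimal, dependency-free sketch of that idea; the `adaptToDeclaredType` helper is hypothetical and only mimics the intent, not the converter's actual mechanics.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class CollectionTypeSketch {

	// Hypothetical stand-in for the fix: adapt the bulk-fetched List to the
	// declared property type instead of returning it as a List unconditionally.
	static Collection<Object> adaptToDeclaredType(List<Object> bulkFetched, Class<?> declaredType) {

		if (Set.class.isAssignableFrom(declaredType)) {
			return new LinkedHashSet<>(bulkFetched); // keeps fetch order, drops duplicates
		}

		return new ArrayList<>(bulkFetched); // everything else stays list-shaped in this sketch
	}

	public static void main(String[] args) {

		List<Object> fetched = Arrays.<Object>asList("ref-1", "ref-2", "ref-2");

		// A property declared as a Set now receives a Set, not a List.
		System.out.println(adaptToDeclaredType(fetched, Set.class));  // [ref-1, ref-2]
		System.out.println(adaptToDeclaredType(fetched, List.class)); // [ref-1, ref-2, ref-2]
	}
}
```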
2 changes: 1 addition & 1 deletion pom.xml
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
4 changes: 2 additions & 2 deletions spring-data-mongodb-cross-store/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
</dependency>

<!-- reactive -->
2 changes: 1 addition & 1 deletion spring-data-mongodb-distribution/pom.xml
@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion spring-data-mongodb-log4j/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion spring-data-mongodb/pom.xml
@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAMONGO-1666-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java
@@ -31,6 +31,7 @@
import org.bson.conversions.Bson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
@@ -55,7 +56,6 @@
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
import org.springframework.data.mongodb.MongoDbFactory;
- import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
@@ -241,17 +241,16 @@ private <S extends Object> S read(TypeInformation<S> type, Bson bson, ObjectPath
}
// Retrieve persistent entity info

- Document target = bson instanceof BasicDBObject ? new Document((BasicDBObject)bson) : (Document) bson;
+ Document target = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson;

- return read((MongoPersistentEntity<S>) mappingContext.getRequiredPersistentEntity(typeToUse), target,
-     path);
+ return read((MongoPersistentEntity<S>) mappingContext.getRequiredPersistentEntity(typeToUse), target, path);
}

private ParameterValueProvider<MongoPersistentProperty> getParameterProvider(MongoPersistentEntity<?> entity,
Bson source, DefaultSpELExpressionEvaluator evaluator, ObjectPath path) {

MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, path);
- PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
+ PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<>(
entity, provider, path.getCurrentObject());

return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider,
@@ -274,7 +273,7 @@ private <S extends Object> S read(final MongoPersistentEntity<S> entity, final D
DocumentAccessor documentAccessor = new DocumentAccessor(bson);

// make sure id property is set before all other properties
- Optional<Object> idValue = idProperty.filter(it -> documentAccessor.hasValue(it)).map(it -> {
+ Optional<Object> idValue = idProperty.filter(documentAccessor::hasValue).map(it -> {

Optional<Object> value = getValueInternal(it, bson, evaluator, path);
accessor.setProperty(it, value);
@@ -286,42 +285,38 @@
idValue.isPresent() ? idProperty.map(it -> bson.get(it.getFieldName())).orElse(null) : null);

// Set properties not already set in the constructor
- entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
-   public void doWithPersistentProperty(MongoPersistentProperty prop) {
-
-     // we skip the id property since it was already set
-     if (idProperty != null && idProperty.equals(prop)) {
-       return;
-     }
-
-     if (entity.isConstructorArgument(prop) || !documentAccessor.hasValue(prop)) {
-       return;
-     }
-
-     accessor.setProperty(prop, getValueInternal(prop, bson, evaluator, currentPath));
-   }
- });
+ entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) prop -> {
+
+   // we skip the id property since it was already set
+   if (idProperty != null && idProperty.equals(prop)) {
+     return;
+   }
+
+   if (entity.isConstructorArgument(prop) || !documentAccessor.hasValue(prop)) {
+     return;
+   }
+
+   accessor.setProperty(prop, getValueInternal(prop, bson, evaluator, currentPath));
+ });

// Handle associations
- entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
-   public void doWithAssociation(Association<MongoPersistentProperty> association) {
-
-     final MongoPersistentProperty property = association.getInverse();
-     Object value = documentAccessor.get(property);
-
-     if (value == null || entity.isConstructorArgument(property)) {
-       return;
-     }
-
-     DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
-
-     DbRefProxyHandler handler = new DefaultDbRefProxyHandler(spELContext, mappingContext,
-         MappingMongoConverter.this);
-     DbRefResolverCallback callback = new DefaultDbRefResolverCallback(bson, currentPath, evaluator,
-         MappingMongoConverter.this);
-
-     accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
-   }
- });
+ entity.doWithAssociations((AssociationHandler<MongoPersistentProperty>) association -> {
+
+   final MongoPersistentProperty property = association.getInverse();
+   Object value = documentAccessor.get(property);
+
+   if (value == null || entity.isConstructorArgument(property)) {
+     return;
+   }
+
+   DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
+
+   DbRefProxyHandler handler = new DefaultDbRefProxyHandler(spELContext, mappingContext,
+       MappingMongoConverter.this);
+   DbRefResolverCallback callback = new DefaultDbRefResolverCallback(bson, currentPath, evaluator,
+       MappingMongoConverter.this);
+
+   accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
+ });

return result;
@@ -431,31 +426,26 @@ protected void writeInternal(Object obj, final Bson bson, MongoPersistentEntity<
prop -> dbObjectAccessor.computeIfAbsent(prop, () -> idMapper.convertId(accessor.getProperty(prop))));

// Write the properties
- entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
-   public void doWithPersistentProperty(MongoPersistentProperty prop) {
-
-     if (idProperty.map(it -> it.equals(prop)).orElse(false) || !prop.isWritable()) {
-       return;
-     }
-
-     accessor.getProperty(prop).ifPresent(it -> {
-       if (!conversions.isSimpleType(it.getClass())) {
-
-         writePropertyInternal(it, bson, prop);
-       } else {
-         writeSimpleInternal(it, bson, prop);
-       }
-     });
-   }
- });
+ entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) prop -> {
+
+   if (idProperty.map(it -> it.equals(prop)).orElse(false) || !prop.isWritable()) {
+     return;
+   }
+
+   accessor.getProperty(prop).ifPresent(it -> {
+     if (!conversions.isSimpleType(it.getClass())) {
+
+       writePropertyInternal(it, bson, prop);
+     } else {
+       writeSimpleInternal(it, bson, prop);
+     }
+   });
+ });

- entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
-
-   public void doWithAssociation(Association<MongoPersistentProperty> association) {
-
-     MongoPersistentProperty inverseProp = association.getInverse();
-     accessor.getProperty(inverseProp).ifPresent(it -> writePropertyInternal(it, bson, inverseProp));
-   }
- });
+ entity.doWithAssociations((AssociationHandler<MongoPersistentProperty>) association -> {
+
+   MongoPersistentProperty inverseProp = association.getInverse();
+   accessor.getProperty(inverseProp).ifPresent(it -> writePropertyInternal(it, bson, inverseProp));
+ });
}

@@ -563,7 +553,7 @@ protected List<Object> createCollection(Collection<?> collection, MongoPersisten
return writeCollectionInternal(collection, Optional.of(property.getTypeInformation()), new BasicDBList());
}

- List<Object> dbList = new ArrayList<Object>();
+ List<Object> dbList = new ArrayList<>(collection.size());

for (Object element : collection) {

@@ -625,7 +615,7 @@ protected Bson createMap(Map<Object, Object> map, MongoPersistentProperty proper
private BasicDBList writeCollectionInternal(Collection<?> source, Optional<TypeInformation<?>> type,
BasicDBList sink) {

- Optional<TypeInformation<?>> componentType = type.flatMap(it -> it.getComponentType());
+ Optional<TypeInformation<?>> componentType = type.flatMap(TypeInformation::getComponentType);

for (Object element : source) {

@@ -759,7 +749,7 @@ protected String potentiallyUnescapeMapKey(String source) {
*/
protected void addCustomTypeKeyIfNecessary(Optional<TypeInformation<?>> type, Object value, Bson bson) {

- Optional<Class<?>> actualType = type.map(it -> it.getActualType()).map(it -> it.getType());
+ Optional<Class<?>> actualType = type.map(TypeInformation::getActualType).map(TypeInformation::getType);
Class<?> reference = actualType.orElse(Object.class);
Class<?> valueType = ClassUtils.getUserClass(value.getClass());

@@ -870,7 +860,7 @@ protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
}

return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity,
- idMapper.convertId(id instanceof Optional ? (Optional)id : Optional.ofNullable(id)).orElse(null));
+ idMapper.convertId(id instanceof Optional ? (Optional) id : Optional.ofNullable(id)).orElse(null));

}).orElseThrow(() -> new MappingException("No id property found on class " + entity.getType()));
}
@@ -905,15 +895,18 @@ private Object readCollectionOrArray(TypeInformation<?> targetType, List sourceV
Class<?> rawComponentType = componentType.getType();

collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
- Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>()
+ Collection<Object> items = targetType.getType().isArray() ? new ArrayList<>(sourceValue.size())
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());

if (sourceValue.isEmpty()) {
return getPotentiallyConvertedSimpleRead(items, collectionType);
}

if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceValue)) {
- return bulkReadAndConvertDBRefs((List<DBRef>) (List) (sourceValue), componentType, path, rawComponentType);
+ List<Object> objects = bulkReadAndConvertDBRefs((List<DBRef>) sourceValue, componentType, path,
+     rawComponentType);
+ return getPotentiallyConvertedSimpleRead(objects, targetType.getType());

Review comment (Member), on the removed line: That's a funny one. I didn't even know casting something twice was allowed :D.
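To unpack the double cast the comment refers to: Java rejects a direct cast between provably unrelated parameterized types, e.g. `List<Object>` to `List<DBRef>`, but casting through the raw type `List` first erases the type argument, leaving only an unchecked warning. Since `sourceValue` is declared as a raw `List` in this method's signature, the single unchecked cast in the replacement line is enough. A self-contained sketch of the language rule, using illustrative types only:

```java
import java.util.ArrayList;
import java.util.List;

public class DoubleCastSketch {

	@SuppressWarnings({ "unchecked", "rawtypes" })
	public static void main(String[] args) {

		List<Object> source = new ArrayList<>();
		source.add(42); // heap pollution waiting to happen

		// Does not compile: the parameterized types are provably unrelated.
		// List<String> direct = (List<String>) source;

		// Compiles with only an "unchecked" warning: the raw cast erases the
		// type argument, and the second cast is then unchecked.
		List<String> viaRaw = (List<String>) (List) source;

		try {
			// The compiler-inserted checkcast fails only here, at read time.
			String s = viaRaw.get(0);
			System.out.println(s);
		} catch (ClassCastException expected) {
			System.out.println("unchecked cast bit back: " + expected.getMessage());
		}
	}
}
```

The cast itself costs nothing at runtime; the failure only surfaces when an element is read back through the parameterized view.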
}

for (Object dbObjItem : sourceValue) {
@@ -962,8 +955,8 @@ protected Map<Object, Object> readMap(TypeInformation<?> type, Bson bson, Object
Class<?> mapType = typeMapper.readType(bson, type).getType();

Optional<TypeInformation<?>> valueType = type.getMapValueType();
- Class<?> rawKeyType = type.getComponentType().map(it -> it.getType()).orElse(null);
- Class<?> rawValueType = type.getMapValueType().map(it -> it.getType()).orElse(null);
+ Class<?> rawKeyType = type.getComponentType().map(TypeInformation::getType).orElse(null);
+ Class<?> rawValueType = type.getMapValueType().map(TypeInformation::getType).orElse(null);

Map<String, Object> sourceMap = asMap(bson);
Map<Object, Object> map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size());
@@ -1006,7 +999,7 @@ protected Map<Object, Object> readMap(TypeInformation<?> type, Bson bson, Object
}

@SuppressWarnings("unchecked")
- private Map<String, Object> asMap(Bson bson) {
+ private static Map<String, Object> asMap(Bson bson) {

if (bson instanceof Document) {
return (Document) bson;
@@ -1020,7 +1013,7 @@ private Map<String, Object> asMap(Bson bson) {
String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass()));
}

- private void addToMap(Bson bson, String key, Object value) {
+ private static void addToMap(Bson bson, String key, Object value) {

if (bson instanceof Document) {
((Document) bson).put(key, value);
@@ -1035,7 +1028,7 @@ private void addToMap(Bson bson, String key, Object value) {
}

@SuppressWarnings("unchecked")
- private void addAllToMap(Bson bson, Map value) {
+ private static void addAllToMap(Bson bson, Map value) {

if (bson instanceof Document) {
((Document) bson).putAll(value);
@@ -1051,7 +1044,7 @@ private void addAllToMap(Bson bson, Map value) {
String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass()));
}

- private void removeFromMap(Bson bson, String key) {
+ private static void removeFromMap(Bson bson, String key) {

if (bson instanceof Document) {
((Document) bson).remove(key);
@@ -1118,7 +1111,7 @@ public Object convertToMongoType(Object obj, TypeInformation<?> typeInformation)

if (obj instanceof Map) {

- Map<Object, Object> converted = new LinkedHashMap<Object, Object>();
+ Map<Object, Object> converted = new LinkedHashMap<>(((Map)obj).size(), 1);
Document result = new Document();

for (Map.Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
@@ -1252,7 +1245,7 @@ public <T> Optional<T> getPropertyValue(MongoPersistentProperty property) {
return Optional

.ofNullable(property.getSpelExpression()//
- .map(it -> evaluator.evaluate(it))//
+ .map(evaluator::evaluate)//
.orElseGet(() -> source.get(property)))//
.map(it -> readValue(it, property.getTypeInformation(), path));
}
@@ -1335,7 +1328,7 @@ private <T> T readAndConvertDBRef(DBRef dbref, TypeInformation<?> type, ObjectPa
private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation<?> valueType, Class<?> rawValueType,
Map<String, Object> sourceMap, Map<Object, Object> targetMap) {

- LinkedHashMap<String, Object> referenceMap = new LinkedHashMap<String, Object>(sourceMap);
+ LinkedHashMap<String, Object> referenceMap = new LinkedHashMap<>(sourceMap);
List<Object> convertedObjects = bulkReadAndConvertDBRefs((List<DBRef>) new ArrayList(referenceMap.values()),
valueType, ObjectPath.ROOT, rawValueType);
int index = 0;
@@ -1358,19 +1351,19 @@ private <T> List<T> bulkReadAndConvertDBRefs(List<DBRef> dbrefs, TypeInformation
? Collections.singletonList(readRef(dbrefs.iterator().next())) : bulkReadRefs(dbrefs);
String collectionName = dbrefs.iterator().next().getCollectionName();

- List<T> targeList = new ArrayList<T>(dbrefs.size());
+ List<T> targeList = new ArrayList<>(dbrefs.size());

for (Document document : referencedRawDocuments) {

if (document != null) {
- maybeEmitEvent(new AfterLoadEvent<T>(document, (Class<T>) rawType, collectionName));
+ maybeEmitEvent(new AfterLoadEvent<>(document, (Class<T>) rawType, collectionName));
}

final T target = (T) read(type, document, path);
targeList.add(target);

if (target != null) {
- maybeEmitEvent(new AfterConvertEvent<T>(document, target, collectionName));
+ maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName));
}
}

@@ -1419,7 +1412,7 @@ private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable<Obje

Assert.notNull(source, "Iterable of DBRefs must not be null!");

- Set<String> collectionsFound = new HashSet<String>();
+ Set<String> collectionsFound = new HashSet<>();

for (Object dbObjItem : source) {
