/*
 * (C) Copyright 2017 Nuxeo (http://nuxeo.com/) and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *     Funsho David
 *
 */

package org.nuxeo.directory.mongodb;

import static org.nuxeo.directory.mongodb.MongoDBSerializationHelper.MONGODB_ID;
import static org.nuxeo.directory.mongodb.MongoDBSerializationHelper.MONGODB_SEQ;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import org.apache.commons.lang.StringUtils;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentModelList;
import org.nuxeo.ecm.core.api.PropertyException;
import org.nuxeo.ecm.core.api.impl.DocumentModelListImpl;
import org.nuxeo.ecm.core.api.model.Property;
import org.nuxeo.ecm.core.api.security.SecurityConstants;
import org.nuxeo.ecm.core.schema.types.Field;
import org.nuxeo.ecm.core.schema.types.Type;
import org.nuxeo.ecm.core.schema.types.primitives.IntegerType;
import org.nuxeo.ecm.core.schema.types.primitives.LongType;
import org.nuxeo.ecm.directory.BaseDirectoryDescriptor.SubstringMatchType;
import org.nuxeo.ecm.directory.BaseSession;
import org.nuxeo.ecm.directory.DirectoryException;
import org.nuxeo.ecm.directory.PasswordHelper;
import org.nuxeo.ecm.directory.Reference;
import org.nuxeo.ecm.directory.Session;

import com.mongodb.MongoClient;
import com.mongodb.MongoWriteException;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReturnDocument;
import com.mongodb.client.model.Updates;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;

/**
 * MongoDB implementation of a {@link Session}.
 * <p>
 * Directory entries are stored as documents in one MongoDB collection (named after the directory); reference fields
 * are kept in separate collections and are therefore stripped out before entry documents are written.
 *
 * @since 9.1
 */
public class MongoDBSession extends BaseSession {

    /** Client owning the connection to the MongoDB server; closed in {@link #close()}. */
    protected MongoClient client;

    /** Name of the MongoDB database holding the directory collections. */
    protected String dbName;

    /** Name of the collection storing auto-increment counters (one counter document per directory). */
    protected String countersCollectionName;

    public MongoDBSession(MongoDBDirectory directory) {
        super(directory, MongoDBReference.class);
        MongoDBDirectoryDescriptor desc = directory.getDescriptor();
        client = MongoDBConnectionHelper.newMongoClient(desc.getServerUrl());
        dbName = desc.getDatabaseName();
        countersCollectionName = directory.getCountersCollectionName();
    }

    @Override
    public MongoDBDirectory getDirectory() {
        return (MongoDBDirectory) directory;
    }

    @Override
    protected DocumentModel createEntryWithoutReferences(Map<String, Object> fieldMap) {
        // Filter out reference fields for creation as we keep them in a different collection
        Map<String, Object> newDocMap = fieldMap.entrySet()
                                                .stream()
                                                .filter(entry -> getDirectory().getReferences(entry.getKey()) == null)
                                                .collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue()),
                                                        HashMap::putAll);

        String idFieldName = schemaFieldMap.get(getIdField()).getName().getPrefixedName();
        String id;
        if (autoincrementId) {
            // Atomically increment the per-directory counter and use the new value as the entry id.
            // upsert(true) seeds the counter document on first use instead of NPE-ing on a null result.
            Document filter = MongoDBSerializationHelper.fieldMapToBson(MONGODB_ID, directoryName);
            Bson update = Updates.inc(MONGODB_SEQ, 1L);
            FindOneAndUpdateOptions options = new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER)
                                                                           .upsert(true);
            Long longId = getCollection(countersCollectionName).findOneAndUpdate(filter, update, options)
                                                               .getLong(MONGODB_SEQ);
            fieldMap.put(idFieldName, longId);
            newDocMap.put(idFieldName, longId);
            id = String.valueOf(longId);
        } else {
            id = String.valueOf(fieldMap.get(idFieldName));
            if (hasEntry(id)) {
                throw new DirectoryException(String.format("Entry with id %s already exists", id));
            }
        }
        try {
            Document bson = MongoDBSerializationHelper.fieldMapToBson(newDocMap);
            // Hash the password before storing it, unless it is already hashed
            String password = (String) newDocMap.get(getPasswordField());
            if (password != null && !PasswordHelper.isHashed(password)) {
                password = PasswordHelper.hashPassword(password, passwordHashAlgorithm);
                bson.append(getPasswordField(), password);
            }
            getCollection().insertOne(bson);
        } catch (MongoWriteException e) {
            throw new DirectoryException(e);
        }
        return createEntryModel(null, schemaName, id, fieldMap, isReadOnly());
    }

    @Override
    protected List<String> updateEntryWithoutReferences(DocumentModel docModel) throws DirectoryException {
        Map<String, Object> fieldMap = new HashMap<>();
        List<String> referenceFieldList = new LinkedList<>();

        for (String fieldName : schemaFieldMap.keySet()) {
            if (fieldName.equals(getIdField())) {
                // the id is the update filter, never an updated value
                continue;
            }
            Property prop = docModel.getPropertyObject(schemaName, fieldName);
            if (prop != null) {
                // NB: the null guard must come before reading the property value, otherwise the
                // password check below would NPE on a document missing that property
                if (fieldName.equals(getPasswordField()) && StringUtils.isEmpty((String) prop.getValue())) {
                    // don't erase the stored password with an empty one
                    continue;
                }
                if (prop.isDirty()) {
                    Serializable value = prop.getValue();
                    if (fieldName.equals(getPasswordField())) {
                        value = PasswordHelper.hashPassword((String) value, passwordHashAlgorithm);
                    }
                    if (value instanceof Calendar) {
                        // MongoDB stores dates, not calendars
                        value = ((Calendar) value).getTime();
                    }
                    fieldMap.put(prop.getName(), value);
                }
            }
            if (getDirectory().isReference(fieldName)) {
                // reference fields are updated separately by the caller
                referenceFieldList.add(fieldName);
            }
        }

        String idFieldName = schemaFieldMap.get(getIdField()).getName().getPrefixedName();
        String id = docModel.getId();
        Document bson = MongoDBSerializationHelper.fieldMapToBson(idFieldName, autoincrementId ? Long.valueOf(id) : id);

        List<Bson> updates = fieldMap.entrySet().stream().map(e -> Updates.set(e.getKey(), e.getValue())).collect(
                Collectors.toList());

        if (!updates.isEmpty()) {
            try {
                UpdateResult result = getCollection().updateOne(bson, Updates.combine(updates));
                // Throw an error if no document matched the update
                if (!result.wasAcknowledged()) {
                    throw new DirectoryException(
                            "Error while updating the entry, the request has not been acknowledged by the server");
                }
                if (result.getMatchedCount() == 0) {
                    throw new DirectoryException(
                            String.format("Error while updating the entry, no document was found with the id %s", id));
                }
            } catch (MongoWriteException e) {
                throw new DirectoryException(e);
            }
        }
        return referenceFieldList;
    }

    @Override
    public void deleteEntryWithoutReferences(String id) throws DirectoryException {
        try {
            String idFieldName = schemaFieldMap.get(getIdField()).getName().getPrefixedName();
            DeleteResult result = getCollection().deleteOne(
                    MongoDBSerializationHelper.fieldMapToBson(idFieldName, autoincrementId ? Long.valueOf(id) : id));
            if (!result.wasAcknowledged()) {
                throw new DirectoryException(
                        "Error while deleting the entry, the request has not been acknowledged by the server");
            }
        } catch (MongoWriteException e) {
            throw new DirectoryException(e);
        }
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext, Map<String, String> orderBy,
            boolean fetchReferences) throws DirectoryException {
        // no limit, no offset
        return query(filter, fulltext, orderBy, fetchReferences, -1, 0);
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext, Map<String, String> orderBy,
            boolean fetchReferences, int limit, int offset) throws DirectoryException {

        if (!hasPermission(SecurityConstants.READ)) {
            return new DocumentModelListImpl();
        }

        // Remove password as it is not possible to do queries with it
        // NOTE(review): this mutates the caller-supplied map — confirm callers don't rely on the key
        filter.remove(getPasswordField());
        Document bson = buildQuery(filter, fulltext);

        DocumentModelList entries = new DocumentModelListImpl();

        FindIterable<Document> results = getCollection().find(bson).skip(offset);
        if (limit > 0) {
            // limit() mutates the iterable in place and returns it
            results.limit(limit);
        }
        for (Document resultDoc : results) {

            // Cast object to document model
            Map<String, Object> fieldMap = MongoDBSerializationHelper.bsonToFieldMap(resultDoc);
            // Remove password from results
            if (!readAllColumns) {
                fieldMap.remove(getPasswordField());
            }
            DocumentModel doc = fieldMapToDocumentModel(fieldMap);

            if (fetchReferences) {
                // Resolve each reference field to its (sorted) list of target ids
                Map<String, List<String>> targetIdsMap = new HashMap<>();
                for (Reference reference : directory.getReferences()) {
                    List<String> targetIds;
                    if (reference instanceof MongoDBReference) {
                        // reuse this session's connection instead of opening a new one
                        MongoDBReference mongoReference = (MongoDBReference) reference;
                        targetIds = mongoReference.getTargetIdsForSource(doc.getId(), this);
                    } else {
                        targetIds = reference.getTargetIdsForSource(doc.getId());
                    }
                    targetIds = new ArrayList<>(targetIds);
                    Collections.sort(targetIds);
                    String fieldName = reference.getFieldName();
                    targetIdsMap.computeIfAbsent(fieldName, key -> new ArrayList<>()).addAll(targetIds);
                }
                for (Map.Entry<String, List<String>> entry : targetIdsMap.entrySet()) {
                    String fieldName = entry.getKey();
                    List<String> targetIds = entry.getValue();
                    try {
                        doc.setProperty(schemaName, fieldName, targetIds);
                    } catch (PropertyException e) {
                        throw new DirectoryException(e);
                    }
                }
            }
            entries.add(doc);
        }

        // Ordering is done in memory after the fetch
        if (orderBy != null && !orderBy.isEmpty()) {
            getDirectory().orderEntries(entries, orderBy);
        }

        return entries;
    }

    /**
     * Builds the MongoDB query document from a field/value filter map.
     *
     * @param fieldMap the filter (field name, possibly prefixed, to expected value)
     * @param fulltext the field names to match as substrings (per {@link SubstringMatchType}) instead of exactly
     * @return the BSON query document
     */
    protected Document buildQuery(Map<String, Serializable> fieldMap, Set<String> fulltext) {

        Document bson = new Document();
        for (Map.Entry<String, Serializable> entry : fieldMap.entrySet()) {
            // Look the schema field up by prefixed name so the value can be converted with its declared type
            Field field = schemaFieldMap.entrySet()
                                        .stream()
                                        .filter(e -> e.getValue().getName().getPrefixedName().equals(entry.getKey()))
                                        .map(Map.Entry::getValue)
                                        .findFirst()
                                        .orElse(null);

            Serializable v = entry.getValue();
            Object value = (field != null) ? MongoDBSerializationHelper.valueToBson(v, field.getType())
                    : MongoDBSerializationHelper.valueToBson(v);
            String key = entry.getKey();
            if (fulltext != null && fulltext.contains(key)) {
                // substring match: anchor the case-insensitive regex according to the configured match type
                String val = String.valueOf(value);
                switch (substringMatchType) {
                case subany:
                    addField(bson, key, Pattern.compile(val, Pattern.CASE_INSENSITIVE));
                    break;
                case subinitial:
                    addField(bson, key, Pattern.compile('^' + val, Pattern.CASE_INSENSITIVE));
                    break;
                case subfinal:
                    addField(bson, key, Pattern.compile(val + '$', Pattern.CASE_INSENSITIVE));
                    break;
                }
            } else {
                addField(bson, key, value);
            }
        }
        return bson;
    }

    /**
     * Puts a criterion in the query document, normalizing the key to the schema field's prefixed name when the key is
     * a plain (unprefixed) field name.
     */
    protected void addField(Document bson, String key, Object value) {
        String keyFieldName = key;
        Field field = schemaFieldMap.get(key);
        if (field != null) {
            keyFieldName = field.getName().getPrefixedName();
        }
        bson.put(keyFieldName, value);
    }

    @Override
    public void close() throws DirectoryException {
        try {
            client.close();
        } finally {
            // always unregister, even if closing the client fails
            getDirectory().removeSession(this);
        }
    }

    @Override
    public boolean authenticate(String username, String password) throws DirectoryException {
        // NOTE(review): queries by the raw id field name, while entries are stored under the prefixed
        // name elsewhere — presumably identical for this schema; confirm
        Document user = getCollection().find(MongoDBSerializationHelper.fieldMapToBson(getIdField(), username)).first();
        if (user == null) {
            return false;
        }
        String storedPassword = user.getString(getPasswordField());
        return PasswordHelper.verifyPassword(password, storedPassword);
    }

    @Override
    public boolean isAuthenticating() {
        // the directory authenticates iff its schema declares a password field
        return schemaFieldMap.containsKey(getPasswordField());
    }

    @Override
    public boolean hasEntry(String id) {
        return getCollection().count(MongoDBSerializationHelper.fieldMapToBson(getIdField(), id)) > 0;
    }

    /**
     * Retrieve a collection
     *
     * @param collection the collection name
     * @return the MongoDB collection
     */
    public MongoCollection<Document> getCollection(String collection) {
        return MongoDBConnectionHelper.getCollection(client, dbName, collection);
    }

    /**
     * Retrieve the collection associated to this directory
     *
     * @return the MongoDB collection
     */
    public MongoCollection<Document> getCollection() {
        return getCollection(directoryName);
    }

    /**
     * Check if the MongoDB server has the collection
     *
     * @param collection the collection name
     * @return true if the server has the collection, false otherwise
     */
    public boolean hasCollection(String collection) {
        return MongoDBConnectionHelper.hasCollection(client, dbName, collection);
    }

    /**
     * Wraps a raw field map into a {@link DocumentModel}, resolving the entry id under either the prefixed or the
     * plain id field name (results may carry either, depending on how they were stored).
     */
    protected DocumentModel fieldMapToDocumentModel(Map<String, Object> fieldMap) {
        String idFieldName = schemaFieldMap.get(getIdField()).getName().getPrefixedName();
        if (!fieldMap.containsKey(idFieldName)) {
            idFieldName = getIdField();
        }
        String id = String.valueOf(fieldMap.get(idFieldName));
        return createEntryModel(null, schemaName, id, fieldMap, isReadOnly());
    }

}