/*
 * (C) Copyright 2017 Nuxeo SA (http://nuxeo.com/) and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *     Funsho David
 *
 */

package org.nuxeo.directory.mongodb;

import static org.nuxeo.mongodb.core.MongoDBSerializationHelper.MONGODB_ID;
import static org.nuxeo.mongodb.core.MongoDBSerializationHelper.MONGODB_INC;
import static org.nuxeo.mongodb.core.MongoDBSerializationHelper.MONGODB_SEQ;
import static org.nuxeo.mongodb.core.MongoDBSerializationHelper.MONGODB_SET;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.Document;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentModelList;
import org.nuxeo.ecm.core.api.PropertyException;
import org.nuxeo.ecm.core.api.impl.DocumentModelListImpl;
import org.nuxeo.ecm.core.api.model.Property;
import org.nuxeo.ecm.core.api.security.SecurityConstants;
import org.nuxeo.ecm.core.schema.types.Field;
import org.nuxeo.ecm.directory.BaseDirectoryDescriptor.SubstringMatchType;
import org.nuxeo.ecm.directory.BaseSession;
import org.nuxeo.ecm.directory.DirectoryException;
import org.nuxeo.ecm.directory.EntrySource;
import org.nuxeo.ecm.directory.PasswordHelper;
import org.nuxeo.ecm.directory.Reference;
import org.nuxeo.ecm.directory.Session;
import org.nuxeo.mongodb.core.MongoDBConnectionHelper;
import org.nuxeo.mongodb.core.MongoDBSerializationHelper;

import com.mongodb.MongoClient;
import com.mongodb.MongoWriteException;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReturnDocument;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;

/**
 * MongoDB implementation of a {@link Session}
 *
 * @since 9.1
 */
public class MongoDBSession extends BaseSession implements EntrySource {

    private static final Log log = LogFactory.getLog(MongoDBSession.class);

    protected MongoClient client;

    protected String dbName;

    protected String schemaName;

    protected String directoryName;

    protected SubstringMatchType substringMatchType;

    protected String countersCollectionName;

    protected final Map<String, Field> schemaFieldMap;

    protected final String passwordHashAlgorithm;

    protected final boolean autoincrementId;

    public MongoDBSession(MongoDBDirectory directory) {
        super(directory);
        MongoDBDirectoryDescriptor desc = directory.getDescriptor();
        client = MongoDBConnectionHelper.newMongoClient(desc.getServerUrl());
        dbName = desc.getDatabaseName();
        directoryName = directory.getName();
        countersCollectionName = directory.getCountersCollectionName();
        schemaName = directory.getSchema();
        substringMatchType = desc.getSubstringMatchType();
        schemaFieldMap = directory.getSchemaFieldMap();
        autoincrementId = desc.isAutoincrementIdField();
        passwordHashAlgorithm = desc.passwordHashAlgorithm;
    }

    @Override
    public MongoDBDirectory getDirectory() {
        return (MongoDBDirectory) directory;
    }

    @Override
    public DocumentModel getEntry(String id) throws DirectoryException {
        return getEntry(id, true);
    }

    @Override
    public DocumentModel getEntry(String id, boolean fetchReferences) throws DirectoryException {
        if (!hasPermission(SecurityConstants.READ)) {
            return null;
        }
        return directory.getCache().getEntry(id, this, fetchReferences);
    }

    @Override
    public DocumentModelList getEntries() throws DirectoryException {
        if (!hasPermission(SecurityConstants.READ)) {
            return new DocumentModelListImpl();
        }
        return query(Collections.emptyMap());
    }

    @Override
    public DocumentModel createEntry(Map<String, Object> fieldMap) throws DirectoryException {
        checkPermission(SecurityConstants.WRITE);
        String id;
        if (autoincrementId) {
            // Atomically increment the per-directory sequence in the counters collection.
            Document filter = MongoDBSerializationHelper.fieldMapToBson(MONGODB_ID, directoryName);
            Document update = new Document().append(MONGODB_INC,
                    MongoDBSerializationHelper.fieldMapToBson(MONGODB_SEQ, 1));
            // upsert(true) creates the counter document on first use, otherwise
            // findOneAndUpdate returns null and the getLong call below would NPE
            FindOneAndUpdateOptions options = new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER)
                                                                           .upsert(true);
            Long longId = getCollection(countersCollectionName).findOneAndUpdate(filter, update, options)
                                                               .getLong(MONGODB_SEQ);
            fieldMap.put(getIdField(), longId);
            id = String.valueOf(longId);
        } else {
            id = String.valueOf(fieldMap.get(getIdField()));
            if (hasEntry(id)) {
                throw new DirectoryException(String.format("Entry with id %s already exists", id));
            }
        }
        // Hash the password before it is persisted
        if (fieldMap.get(getPasswordField()) != null) {
            String password = (String) fieldMap.get(getPasswordField());
            password = PasswordHelper.hashPassword(password, passwordHashAlgorithm);
            fieldMap.put(getPasswordField(), password);
        }
        try {
            Document bson = MongoDBSerializationHelper.fieldMapToBson(fieldMap);
            getCollection().insertOne(bson);

            DocumentModel docModel = BaseSession.createEntryModel(null, schemaName, id, fieldMap, isReadOnly());

            // Add references fields
            Field schemaIdField = schemaFieldMap.get(getIdField());
            String idFieldName = schemaIdField.getName().getPrefixedName();

            String sourceId = docModel.getId();
            for (Reference reference : getDirectory().getReferences()) {
                String referenceFieldName = schemaFieldMap.get(reference.getFieldName()).getName().getPrefixedName();
                if (getDirectory().getReferences(reference.getFieldName()).size() > 1) {
                    log.warn("Directory " + getDirectory().getName() + " cannot create field "
                            + reference.getFieldName() + " for entry " + fieldMap.get(idFieldName)
                            + ": this field is associated with more than one reference");
                    continue;
                }

                @SuppressWarnings("unchecked")
                List<String> targetIds = (List<String>) fieldMap.get(referenceFieldName);
                if (reference instanceof MongoDBReference) {
                    MongoDBReference mongodbReference = (MongoDBReference) reference;
                    mongodbReference.addLinks(sourceId, targetIds, this);
                } else {
                    reference.addLinks(sourceId, targetIds);
                }
            }

            getDirectory().invalidateCaches();
            return docModel;
        } catch (MongoWriteException e) {
            throw new DirectoryException(e);
        }
    }

    @Override
    public void updateEntry(DocumentModel docModel) throws DirectoryException {
        checkPermission(SecurityConstants.WRITE);
        Map<String, Object> fieldMap = new HashMap<>();
        List<String> referenceFieldList = new LinkedList<>();

        for (String fieldName : schemaFieldMap.keySet()) {
            Property prop = docModel.getPropertyObject(schemaName, fieldName);
            // Skip an empty password so an update does not erase the stored hash.
            // The null check must come first: prop may be absent from the model.
            if (prop != null && fieldName.equals(getPasswordField())
                    && StringUtils.isEmpty((String) prop.getValue())) {
                continue;
            }
            if (prop != null && prop.isDirty()) {
                Serializable value = prop.getValue();
                if (fieldName.equals(getPasswordField())) {
                    // Hash the new password value itself; the previous code read it
                    // from the not-yet-populated fieldMap and always hashed null
                    value = PasswordHelper.hashPassword((String) value, passwordHashAlgorithm);
                }
                fieldMap.put(prop.getName(), value);
            }
            if (getDirectory().isReference(fieldName)) {
                referenceFieldList.add(fieldName);
            }
        }

        String id = docModel.getId();
        Document bson = MongoDBSerializationHelper.fieldMapToBson(getIdField(), id);

        Document props = new Document();
        props.append(MONGODB_SET, MongoDBSerializationHelper.fieldMapToBson(fieldMap));

        try {
            UpdateResult result = getCollection().updateOne(bson, props);
            // Throw an error if no document matched the update
            if (!result.wasAcknowledged()) {
                throw new DirectoryException(
                        "Error while updating the entry, the request has not been acknowledged by the server");
            }
            if (result.getMatchedCount() == 0) {
                throw new DirectoryException(
                        String.format("Error while updating the entry, no document was found with the id %s", id));
            }
        } catch (MongoWriteException e) {
            throw new DirectoryException(e);
        }

        // update reference fields
        for (String referenceFieldName : referenceFieldList) {
            List<Reference> references = directory.getReferences(referenceFieldName);
            if (references.size() > 1) {
                // not supported
                log.warn("Directory " + getDirectory().getName() + " cannot update field " + referenceFieldName
                        + " for entry " + docModel.getId() + ": this field is associated with more than one reference");
            } else {
                Reference reference = references.get(0);
                @SuppressWarnings("unchecked")
                List<String> targetIds = (List<String>) docModel.getProperty(schemaName, referenceFieldName);
                if (reference instanceof MongoDBReference) {
                    MongoDBReference mongoReference = (MongoDBReference) reference;
                    mongoReference.setTargetIdsForSource(docModel.getId(), targetIds, this);
                } else {
                    reference.setTargetIdsForSource(docModel.getId(), targetIds);
                }
            }
        }
        getDirectory().invalidateCaches();
    }

    @Override
    public void deleteEntry(DocumentModel docModel) throws DirectoryException {
        deleteEntry(docModel.getId());
    }

    @Override
    public void deleteEntry(String id) throws DirectoryException {
        checkPermission(SecurityConstants.WRITE);
        checkDeleteConstraints(id);

        // Remove the entry's reference links before the entry itself
        for (Reference reference : getDirectory().getReferences()) {
            if (reference instanceof MongoDBReference) {
                MongoDBReference mongoDBReference = (MongoDBReference) reference;
                mongoDBReference.removeLinksForSource(id, this);
            } else {
                reference.removeLinksForSource(id);
            }
        }

        try {
            DeleteResult result = getCollection().deleteOne(
                    MongoDBSerializationHelper.fieldMapToBson(getIdField(), id));
            if (!result.wasAcknowledged()) {
                throw new DirectoryException(
                        "Error while deleting the entry, the request has not been acknowledged by the server");
            }
        } catch (MongoWriteException e) {
            throw new DirectoryException(e);
        }
        getDirectory().invalidateCaches();
    }

    @Override
    public void deleteEntry(String id, Map<String, String> map) throws DirectoryException {
        // TODO deprecate this as it's unused
        deleteEntry(id);
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter) throws DirectoryException {
        return query(filter, Collections.emptySet());
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext) throws DirectoryException {
        return query(filter, fulltext, new HashMap<>());
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext, Map<String, String> orderBy)
            throws DirectoryException {
        return query(filter, fulltext, orderBy, false);
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext, Map<String, String> orderBy,
            boolean fetchReferences) throws DirectoryException {
        return query(filter, fulltext, orderBy, fetchReferences, -1, -1);
    }

    @Override
    public DocumentModelList query(Map<String, Serializable> filter, Set<String> fulltext, Map<String, String> orderBy,
            boolean fetchReferences, int limit, int offset) throws DirectoryException {

        Document bson = buildQuery(filter, fulltext);

        DocumentModelList entries = new DocumentModelListImpl();

        FindIterable<Document> results = getCollection().find(bson);
        // -1 is the "unset" sentinel passed by the shorter overloads; a negative
        // skip/limit must not be forwarded to the server
        if (offset > 0) {
            results.skip(offset);
        }
        if (limit > 0) {
            results.limit(limit);
        }
        for (Document resultDoc : results) {

            // Cast object to document model
            Map<String, Object> fieldMap = MongoDBSerializationHelper.bsonToFieldMap(resultDoc);
            DocumentModel doc = fieldMapToDocumentModel(fieldMap);

            if (fetchReferences) {
                Map<String, List<String>> targetIdsMap = new HashMap<>();
                for (Reference reference : directory.getReferences()) {
                    List<String> targetIds;
                    if (reference instanceof MongoDBReference) {
                        MongoDBReference mongoReference = (MongoDBReference) reference;
                        targetIds = mongoReference.getTargetIdsForSource(doc.getId(), this);
                    } else {
                        targetIds = reference.getTargetIdsForSource(doc.getId());
                    }
                    targetIds = new ArrayList<>(targetIds);
                    Collections.sort(targetIds);
                    String fieldName = reference.getFieldName();
                    targetIdsMap.computeIfAbsent(fieldName, key -> new ArrayList<>()).addAll(targetIds);
                }
                for (Map.Entry<String, List<String>> entry : targetIdsMap.entrySet()) {
                    String fieldName = entry.getKey();
                    List<String> targetIds = entry.getValue();
                    try {
                        doc.setProperty(schemaName, fieldName, targetIds);
                    } catch (PropertyException e) {
                        throw new DirectoryException(e);
                    }
                }
            }
            entries.add(doc);
        }

        if (orderBy != null && !orderBy.isEmpty()) {
            getDirectory().orderEntries(entries, orderBy);
        }

        return entries;
    }

    protected Document buildQuery(Map<String, Serializable> fieldMap, Set<String> fulltext) {

        Document bson = new Document();
        for (Map.Entry<String, Serializable> entry : fieldMap.entrySet()) {
            Object value = MongoDBSerializationHelper.valueToBson(entry.getValue());
            if (value != null) {
                String key = entry.getKey();
                if (fulltext.contains(key)) {
                    // Substring matching via a case-insensitive regex, anchored
                    // according to the directory's substringMatchType
                    String val = String.valueOf(value);
                    switch (substringMatchType) {
                    case subany:
                        addField(bson, key, Pattern.compile(val, Pattern.CASE_INSENSITIVE));
                        break;
                    case subinitial:
                        addField(bson, key, Pattern.compile('^' + val, Pattern.CASE_INSENSITIVE));
                        break;
                    case subfinal:
                        addField(bson, key, Pattern.compile(val + '$', Pattern.CASE_INSENSITIVE));
                        break;
                    }
                } else {
                    addField(bson, key, value);
                }
            }
        }
        return bson;
    }

    protected void addField(Document bson, String key, Object value) {
        bson.put(key, value);
    }

    @Override
    public void close() throws DirectoryException {
        client.close();
        getDirectory().removeSession(this);
    }

    @Override
    public List<String> getProjection(Map<String, Serializable> filter, String columnName) throws DirectoryException {
        return getProjection(filter, Collections.emptySet(), columnName);
    }

    @Override
    public List<String> getProjection(Map<String, Serializable> filter, Set<String> fulltext, String columnName)
            throws DirectoryException {
        DocumentModelList docList = query(filter, fulltext);
        List<String> result = new ArrayList<>();
        for (DocumentModel docModel : docList) {
            Object obj = docModel.getProperty(schemaName, columnName);
            String propValue = String.valueOf(obj);
            result.add(propValue);
        }
        return result;
    }

    @Override
    public boolean authenticate(String username, String password) throws DirectoryException {
        Document user = getCollection().find(MongoDBSerializationHelper.fieldMapToBson(getIdField(), username))
                                       .first();
        if (user == null) {
            // unknown user id: authentication fails instead of throwing NPE
            return false;
        }
        String storedPassword = user.getString(getPasswordField());
        return PasswordHelper.verifyPassword(password, storedPassword);
    }

    @Override
    public boolean hasEntry(String id) {
        return getCollection().count(MongoDBSerializationHelper.fieldMapToBson(getIdField(), id)) > 0;
    }

    @Override
    public DocumentModel createEntry(DocumentModel documentModel) {
        return createEntry(documentModel.getProperties(schemaName));
    }

    @Override
    public DocumentModel getEntryFromSource(String id, boolean fetchReferences) throws DirectoryException {
        DocumentModelList result = query(Collections.singletonMap(getIdField(), id), Collections.emptySet(),
                Collections.emptyMap(), fetchReferences, 1, -1);
        // return null for a missing entry instead of throwing IndexOutOfBoundsException
        return result.isEmpty() ? null : result.get(0);
    }

    /**
     * Retrieve a collection
     *
     * @param collection the collection name
     * @return the MongoDB collection
     */
    public MongoCollection<Document> getCollection(String collection) {
        return MongoDBConnectionHelper.getCollection(client, dbName, collection);
    }

    /**
     * Retrieve the collection associated to this directory
     *
     * @return the MongoDB collection
     */
    public MongoCollection<Document> getCollection() {
        return getCollection(directoryName);
    }

    /**
     * Check if the MongoDB server has the collection
     *
     * @param collection the collection name
     * @return true if the server has the collection, false otherwise
     */
    public boolean hasCollection(String collection) {
        return MongoDBConnectionHelper.hasCollection(client, dbName, collection);
    }

    protected DocumentModel fieldMapToDocumentModel(Map<String, Object> fieldMap) {
        String id = String.valueOf(fieldMap.get(getIdField()));
        return BaseSession.createEntryModel(null, schemaName, id, fieldMap, isReadOnly());
    }

}