/*
 * (C) Copyright 2014-2016 Nuxeo SA (http://nuxeo.com/) and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *     Tiry
 *     bdelbosc
 */
package org.nuxeo.elasticsearch.core;

import static org.nuxeo.elasticsearch.ElasticSearchConstants.CHILDREN_FIELD;
import static org.nuxeo.elasticsearch.ElasticSearchConstants.INDEX_BULK_MAX_SIZE_PROPERTY;
import static org.nuxeo.elasticsearch.ElasticSearchConstants.PATH_FIELD;

import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.nuxeo.ecm.core.api.ConcurrentUpdateException;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentNotFoundException;
import org.nuxeo.ecm.core.api.NuxeoException;
import org.nuxeo.ecm.core.api.model.BlobNotFoundException;
import org.nuxeo.elasticsearch.api.ElasticSearchIndexing;
import org.nuxeo.elasticsearch.commands.IndexingCommand;
import org.nuxeo.elasticsearch.commands.IndexingCommand.Type;
import org.nuxeo.elasticsearch.io.JsonESDocumentWriter;
import org.nuxeo.runtime.api.Framework;
import org.nuxeo.runtime.metrics.MetricsService;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import io.dropwizard.metrics5.MetricName;
import io.dropwizard.metrics5.MetricRegistry;
import io.dropwizard.metrics5.SharedMetricRegistries;
import io.dropwizard.metrics5.Timer;
import io.dropwizard.metrics5.Timer.Context;

/**
 * Implementation of the {@link ElasticSearchIndexing} service: translates Nuxeo
 * {@link IndexingCommand}s into Elasticsearch index/delete/bulk requests.
 * <p>
 * Indexing and deletion are timed through dropwizard metrics; bulk indexing is
 * chunked so a single bulk request never exceeds {@link #getMaxBulkSize()} bytes.
 *
 * @since 6.0
 */
public class ElasticSearchIndexingImpl implements ElasticSearchIndexing {

    private static final Log log = LogFactory.getLog(ElasticSearchIndexingImpl.class);

    // debug curl line max size
    private static final int MAX_CURL_LINE = 8 * 1024;

    // send the bulk indexing command when this size is reached, optimal is 5-10m
    private static final int DEFAULT_MAX_BULK_SIZE = 5 * 1024 * 1024;

    private final ElasticSearchAdminImpl esa;

    private final Timer deleteTimer;

    private final Timer indexTimer;

    private final Timer bulkIndexTimer;

    // when true, index requests carry the command order as an external version
    // so Elasticsearch rejects out-of-order (stale) updates
    private final boolean useExternalVersion;

    private JsonESDocumentWriter jsonESDocumentWriter;

    protected static final JsonFactory JSON_FACTORY = new JsonFactory();

    public ElasticSearchIndexingImpl(ElasticSearchAdminImpl esa) {
        this.esa = esa;
        MetricRegistry registry = SharedMetricRegistries.getOrCreate(MetricsService.class.getName());
        indexTimer = registry.timer(MetricName.build("nuxeo.elasticsearch.service.timer").tagged("service", "index"));
        deleteTimer = registry.timer(MetricName.build("nuxeo.elasticsearch.service.timer").tagged("service", "delete"));
        bulkIndexTimer = registry.timer(
                MetricName.build("nuxeo.elasticsearch.service.timer").tagged("service", "bulkIndex"));
        this.jsonESDocumentWriter = new JsonESDocumentWriter(); // default writer
        this.useExternalVersion = esa.useExternalVersion();
    }

    /**
     * Builds the service with a custom document-to-JSON writer.
     *
     * @since 7.2
     */
    public ElasticSearchIndexingImpl(ElasticSearchAdminImpl esa, JsonESDocumentWriter jsonESDocumentWriter) {
        this(esa);
        this.jsonESDocumentWriter = jsonESDocumentWriter;
    }

    @Override
    public void runIndexingWorker(List<IndexingCommand> cmds) {
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void runReindexingWorker(String repositoryName, String nxql, boolean syncAlias) {
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void reindexRepository(String repositoryName) {
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void indexNonRecursive(List<IndexingCommand> cmds) {
        int nbCommands = cmds.size();
        if (nbCommands == 1) {
            indexNonRecursive(cmds.get(0));
            return;
        }
        // simulate long indexing
        // try {Thread.sleep(1000);} catch (InterruptedException e) { }

        // deletes are processed one by one, then the remaining commands in bulk
        processBulkDeleteCommands(cmds);
        try (Context ignored = bulkIndexTimer.time()) {
            processBulkIndexCommands(cmds);
        }
        esa.totalCommandProcessed.addAndGet(nbCommands);
        refreshIfNeeded(cmds);
    }

    /**
     * Executes the DELETE commands of the batch, one delete request each.
     */
    void processBulkDeleteCommands(List<IndexingCommand> cmds) {
        // Can be optimized with a single delete by query
        for (IndexingCommand cmd : cmds) {
            if (cmd.getType() == Type.DELETE) {
                try (Context ignored = deleteTimer.time()) {
                    processDeleteCommand(cmd);
                }
            }
        }
    }

    /**
     * Turns the non-delete commands of the batch into bulk index requests,
     * de-duplicating documents and flushing whenever the accumulated source
     * size exceeds {@link #getMaxBulkSize()}.
     */
    void processBulkIndexCommands(List<IndexingCommand> cmds) {
        BulkRequest bulkRequest = new BulkRequest();
        Set<String> docIds = new HashSet<>(cmds.size());
        int bulkSize = 0;
        final int maxBulkSize = getMaxBulkSize();
        for (IndexingCommand cmd : cmds) {
            if (cmd.getType() == Type.DELETE || cmd.getType() == Type.UPDATE_DIRECT_CHILDREN) {
                continue;
            }
            if (!docIds.add(cmd.getTargetDocumentId())) {
                // do not submit the same doc 2 times
                continue;
            }
            try {
                IndexRequest idxRequest = buildEsIndexingRequest(cmd);
                if (idxRequest != null) {
                    bulkSize += idxRequest.source().length();
                    bulkRequest.add(idxRequest);
                }
            } catch (BlobNotFoundException be) {
                log.info("Ignore indexing command in bulk, blob does not exists anymore: " + cmd);
            } catch (ConcurrentUpdateException e) {
                throw e; // bubble up, usually until AbstractWork catches it and maybe retries
            } catch (DocumentNotFoundException e) {
                log.info("Ignore indexing command in bulk, doc does not exists anymore: " + cmd);
            } catch (IllegalArgumentException e) {
                log.error("Ignore indexing command in bulk, fail to create request: " + cmd, e);
            }
            if (bulkSize > maxBulkSize) {
                log.warn("Max bulk size reached " + bulkSize + ", sending bulk command");
                sendBulkCommand(bulkRequest, bulkSize);
                bulkRequest = new BulkRequest();
                bulkSize = 0;
            }
        }
        // flush the remainder (sendBulkCommand is a no-op on an empty request)
        sendBulkCommand(bulkRequest, bulkSize);
    }

    /**
     * Returns the bulk flush threshold in bytes, configurable through the
     * {@code INDEX_BULK_MAX_SIZE_PROPERTY} framework property.
     */
    int getMaxBulkSize() {
        String value = Framework.getProperty(INDEX_BULK_MAX_SIZE_PROPERTY, String.valueOf(DEFAULT_MAX_BULK_SIZE));
        return Integer.parseInt(value);
    }

    /**
     * Submits a bulk request if it holds at least one action, logging failures.
     */
    void sendBulkCommand(BulkRequest bulkRequest, int bulkSize) {
        if (bulkRequest.numberOfActions() > 0) {
            if (log.isDebugEnabled()) {
                logDebugMessageTruncated(String.format(
                        "Index %d docs (%d bytes) in bulk request: curl -XPOST 'http://localhost:9200/_bulk' -d '%s'",
                        bulkRequest.numberOfActions(), bulkSize, bulkRequest.requests().toString()), MAX_CURL_LINE);
            }
            BulkResponse response = esa.getClient().bulk(bulkRequest);
            if (response.hasFailures()) {
                logBulkFailure(response);
            }
        }
    }

    /**
     * Logs bulk failures: version conflicts (a newer revision is already
     * indexed) are only debug-logged; any other failure is an error.
     */
    void logBulkFailure(BulkResponse response) {
        boolean isError = false;
        StringBuilder sb = new StringBuilder();
        sb.append("Ignore indexing of some docs more recent versions has already been indexed");
        for (BulkItemResponse item : response.getItems()) {
            if (item.isFailed()) {
                if (item.getFailure().getStatus() == RestStatus.CONFLICT) {
                    sb.append("\n  ").append(item.getFailureMessage());
                } else {
                    isError = true;
                }
            }
        }
        if (isError) {
            log.error(response.buildFailureMessage());
        } else {
            // pass an explicit String, not the StringBuilder object
            log.debug(sb.toString());
        }
    }

    void refreshIfNeeded(List<IndexingCommand> cmds) {
        // a single sync command forces one refresh for the whole batch
        for (IndexingCommand cmd : cmds) {
            if (refreshIfNeeded(cmd)) {
                return;
            }
        }
    }

    /**
     * Refreshes the index when the command is synchronous.
     *
     * @return {@code true} if a refresh was triggered
     */
    boolean refreshIfNeeded(IndexingCommand cmd) {
        if (cmd.isSync()) {
            esa.refresh();
            return true;
        }
        return false;
    }

    @Override
    public void indexNonRecursive(IndexingCommand cmd) {
        Type type = cmd.getType();
        if (type == Type.UPDATE_DIRECT_CHILDREN) {
            // the parent doesn't need to be indexed
            return;
        }
        if (type == Type.DELETE) {
            try (Context ignored = deleteTimer.time()) {
                processDeleteCommand(cmd);
            }
        } else {
            try (Context ignored = indexTimer.time()) {
                processIndexCommand(cmd);
            }
        }
        refreshIfNeeded(cmd);
        esa.totalCommandProcessed.incrementAndGet();
    }

    /**
     * Builds and submits a single index request; silently skips documents
     * (or blobs) that no longer exist and stale concurrent updates.
     */
    void processIndexCommand(IndexingCommand cmd) {
        IndexRequest request;
        try {
            request = buildEsIndexingRequest(cmd);
        } catch (BlobNotFoundException | DocumentNotFoundException e) {
            // target document or one of its blobs is gone: nothing to index
            request = null;
        } catch (IllegalStateException e) {
            log.error("Fail to create request for indexing command: " + cmd, e);
            return;
        }
        if (request == null) {
            log.info("Cancel indexing command because target document does not exists anymore: " + cmd);
            return;
        }
        if (log.isDebugEnabled()) {
            logDebugMessageTruncated(String.format("Index request: curl -XPUT 'http://localhost:9200/%s/%s' -d '%s'",
                    getWriteIndexForRepository(cmd.getRepositoryName()), cmd.getTargetDocumentId(),
                    request.toString()), MAX_CURL_LINE);
        }
        try {
            esa.getClient().index(request);
        } catch (ConcurrentUpdateException e) {
            log.info("Ignore indexing of doc " + cmd.getTargetDocumentId()
                    + " a more recent version has already been indexed: " + e.getMessage());
        }
    }

    /**
     * Debug-logs {@code msg}, truncated to {@code maxSize} characters unless
     * trace level is enabled (trace outputs the full message).
     */
    void logDebugMessageTruncated(String msg, int maxSize) {
        if (log.isTraceEnabled() || msg.length() < maxSize) {
            // in trace mode we output the full message
            log.debug(msg);
        } else {
            log.debug(msg.substring(0, maxSize) + "...");
        }
    }

    void processDeleteCommand(IndexingCommand cmd) {
        if (cmd.isRecurse()) {
            processDeleteCommandRecursive(cmd);
        } else {
            processDeleteCommandNonRecursive(cmd);
        }
    }

    void processDeleteCommandNonRecursive(IndexingCommand cmd) {
        String indexName = getWriteIndexForRepository(cmd.getRepositoryName());
        DeleteRequest request = new DeleteRequest(indexName, cmd.getTargetDocumentId());
        if (log.isDebugEnabled()) {
            log.debug(String.format("Delete request: curl -XDELETE 'http://localhost:9200/%s/%s'", indexName,
                    cmd.getTargetDocumentId()));
        }
        esa.getClient().delete(request);
    }

    /**
     * Deletes a document and all of its descendants, using a scroll query on
     * the {@code CHILDREN_FIELD} path and bulk delete requests per page.
     */
    void processDeleteCommandRecursive(IndexingCommand cmd) {
        String indexName = getWriteIndexForRepository(cmd.getRepositoryName());
        // we don't want to rely on target document because the document can be
        // already removed
        String docPath = getPathOfDocFromEs(cmd.getRepositoryName(), cmd.getTargetDocumentId());
        if (docPath == null) {
            if (!Framework.isTestModeSet()) {
                log.warn("Trying to delete a non existing doc: " + cmd);
            }
            return;
        }
        // Refresh index before bulk delete
        esa.getClient().refresh(indexName);

        // Run the scroll query
        QueryBuilder query = QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(CHILDREN_FIELD, docPath));
        TimeValue keepAlive = TimeValue.timeValueMinutes(1);
        SearchSourceBuilder search = new SearchSourceBuilder().size(100).query(query).fetchSource(false);
        SearchRequest request = new SearchRequest(indexName).scroll(keepAlive).source(search);
        if (log.isDebugEnabled()) {
            log.debug(String.format(
                    "Search with scroll request: curl -XGET 'http://localhost:9200/%s/_search?scroll=%s' -d '%s'",
                    indexName, keepAlive, query.toString()));
        }
        for (SearchResponse response = esa.getClient().search(request); //
                response.getHits().getHits().length > 0; //
                response = runNextScroll(response, keepAlive)) {

            // Build bulk delete request
            BulkRequest bulkRequest = new BulkRequest();
            for (SearchHit hit : response.getHits().getHits()) {
                bulkRequest.add(new DeleteRequest(hit.getIndex(), hit.getId()));
            }
            if (log.isDebugEnabled()) {
                log.debug(String.format("Bulk delete request on %s elements", bulkRequest.numberOfActions()));
            }
            // Run bulk delete request
            esa.getClient().bulk(bulkRequest);
        }
    }

    /**
     * Fetches the next page of an open scroll.
     */
    SearchResponse runNextScroll(SearchResponse response, TimeValue keepAlive) {
        if (log.isDebugEnabled()) {
            log.debug(String.format(
                    "Scroll request: -XGET 'localhost:9200/_search/scroll' -d '{\"scroll\": \"%s\", \"scroll_id\": \"%s\" }'",
                    keepAlive, response.getScrollId()));
        }
        SearchScrollRequest request = new SearchScrollRequest(response.getScrollId()).scroll(keepAlive);
        return esa.getClient().searchScroll(request);
    }

    /**
     * Return the ecm:path of an ES document or null if not found.
     */
    String getPathOfDocFromEs(String repository, String docId) {
        String indexName = getWriteIndexForRepository(repository);
        GetRequest request = new GetRequest(indexName, docId).fetchSourceContext(
                new FetchSourceContext(true, new String[] { PATH_FIELD }, null));
        if (log.isDebugEnabled()) {
            log.debug(String.format("Get path of doc: curl -XGET 'http://localhost:9200/%s/%s?fields=%s'", indexName,
                    docId, PATH_FIELD));
        }
        GetResponse ret = esa.getClient().get(request);
        if (!ret.isExists() || ret.getSource() == null || ret.getSource().get(PATH_FIELD) == null) {
            // doc not found
            return null;
        }
        return ret.getSource().get(PATH_FIELD).toString();
    }

    /**
     * Return indexing request or null if the doc does not exists anymore.
     *
     * @throws java.lang.IllegalStateException if the command is not attached to a session
     */
    IndexRequest buildEsIndexingRequest(IndexingCommand cmd) {
        DocumentModel doc = cmd.getTargetDocument();
        if (doc == null) {
            return null;
        }
        try {
            IndexRequest request = new IndexRequest(getWriteIndexForRepository(cmd.getRepositoryName())).id(
                    cmd.getTargetDocumentId()).source(source(doc), XContentType.JSON);
            if (useExternalVersion && cmd.getOrder() > 0) {
                // external versioning lets ES reject out-of-order updates
                request.versionType(VersionType.EXTERNAL).version(cmd.getOrder());
            }
            return request;
        } catch (IOException e) {
            throw new NuxeoException("Unable to create index request for Document " + cmd.getTargetDocumentId(), e);
        }
    }

    protected String getWriteIndexForRepository(String repository) {
        return esa.getWriteIndexName(esa.getIndexNameForRepository(repository));
    }

    @Override
    public BytesReference source(DocumentModel doc) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        try (JsonGenerator jsonGen = JSON_FACTORY.createGenerator(out)) {
            jsonESDocumentWriter.writeESDocument(jsonGen, doc, null, null);
            return out.bytes();
        }
    }
}