
Commit

refactoring
Issue imixs#554
rsoika committed Aug 30, 2019
1 parent d25e574 commit 3d4353c
Showing 11 changed files with 251 additions and 312 deletions.
@@ -40,12 +40,12 @@
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.annotation.Resource;
import javax.annotation.security.DeclareRoles;
import javax.annotation.security.RolesAllowed;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.SessionContext;
import javax.ejb.Stateless;
@@ -61,11 +61,12 @@
import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.WorkflowKernel;
import org.imixs.workflow.engine.index.SearchService;
import org.imixs.workflow.engine.index.UpdateService;
import org.imixs.workflow.engine.index.SortOrder;
import org.imixs.workflow.engine.index.UpdateService;
import org.imixs.workflow.engine.jpa.Document;
import org.imixs.workflow.exceptions.AccessDeniedException;
import org.imixs.workflow.exceptions.InvalidAccessException;
import org.imixs.workflow.exceptions.PluginException;
import org.imixs.workflow.exceptions.QueryException;

/**
@@ -135,6 +136,10 @@ public class DocumentService {

public static final String ACCESSLEVEL_MANAGERACCESS = "org.imixs.ACCESSLEVEL.MANAGERACCESS";

public static final String EVENTLOG_TOPIC_INDEX_ADD = "index.add";
public static final String EVENTLOG_TOPIC_INDEX_REMOVE = "index.remove";


public static final String READACCESS = "$readaccess";
public static final String WRITEACCESS = "$writeaccess";
public static final String ISAUTHOR = "$isAuthor";
@@ -166,8 +171,11 @@ public class DocumentService {
private UpdateService indexUpdateService;

@Inject
private SearchService luceneSearchService;
private SearchService indexSearchService;

@Inject
private EventLogService eventLogService;

@Inject
protected Event<DocumentEvent> documentEvents;

@@ -473,10 +481,10 @@ public ItemCollection save(ItemCollection document) throws AccessDeniedException

// add/update document into lucene index
if (!document.getItemValueBoolean(NOINDEX)) {
indexUpdateService.updateDocument(document);
addDocumentToIndex(document);
} else {
// remove from index
indexUpdateService.removeDocument(document.getUniqueID());
removeDocumentFromIndex(document.getUniqueID());
}

/*
@@ -490,6 +498,50 @@ public ItemCollection save(ItemCollection document) throws AccessDeniedException
// return the updated document
return document;
}



/**
 * This method adds a single document to the Lucene index. Before the
* document is added to the index, a new eventLog is created. The document will
* be indexed after the method flushEventLog is called. This method is called by
* the LuceneSearchService finder methods.
* <p>
* The method supports committed read. This means that a running transaction
* will not read an uncommitted document from the Lucene index.
*
*
 * @param document
*/
public void addDocumentToIndex(ItemCollection document) {
// skip if the flag 'noindex' = true
if (!document.getItemValueBoolean(DocumentService.NOINDEX)) {
// write a new EventLog entry for each document....
eventLogService.createEvent(EVENTLOG_TOPIC_INDEX_ADD, document.getUniqueID());
}
}

/**
* This method adds a new eventLog for a document to be deleted from the index.
 * The document will be removed from the index after the method flushEventLog
* is called. This method is called by the LuceneSearchService finder method
* only.
*
*
* @param uniqueID
* of the workitem to be removed
* @throws PluginException
*/
public void removeDocumentFromIndex(String uniqueID) {

long ltime = System.currentTimeMillis();
eventLogService.createEvent(EVENTLOG_TOPIC_INDEX_REMOVE, uniqueID);
if (logger.isLoggable(Level.FINE)) {
logger.fine("... update eventLog cache in " + (System.currentTimeMillis() - ltime)
+ " ms (1 document to be removed)");
}
}


/**
* This method saves a workitem in a new transaction. The method can be used by
@@ -619,7 +671,7 @@ public void remove(ItemCollection document) throws AccessDeniedException {
manager.remove(persistedDocument);
// remove document form index - @see issue #412
if (!document.getItemValueBoolean(NOINDEX)) {
indexUpdateService.removeDocument(document.getUniqueID());
removeDocumentFromIndex(document.getUniqueID());
}

} else
@@ -665,7 +717,8 @@ public int count(String searchTerm) throws QueryException {
* in case the searchterm is not understandable.
*/
public int count(String sSearchTerm, int maxResult) throws QueryException {
return luceneSearchService.getTotalHits(sSearchTerm, maxResult, null);
indexUpdateService.updateIndex();
return indexSearchService.getTotalHits(sSearchTerm, maxResult, null);
}

/**
@@ -749,7 +802,10 @@ public List<ItemCollection> find(String searchTerm, int pageSize, int pageIndex,
sortOrder = new SortOrder(sortBy, sortReverse);
}

return luceneSearchService.search(searchTerm, pageSize, pageIndex, sortOrder, null);
// flush eventlog (see issue #411)
indexUpdateService.updateIndex();

return indexSearchService.search(searchTerm, pageSize, pageIndex, sortOrder, null, false);

}

@@ -796,8 +852,12 @@ public List<ItemCollection> findStubs(String searchTerm, int pageSize, int pageI
// it would be possible if we use a SortedSetSortField class here
sortOrder = new SortOrder(sortBy, sortReverse);
}

// flush eventlog (see issue #411)
indexUpdateService.updateIndex();

// find stubs only!
return luceneSearchService.search(searchTerm, pageSize, pageIndex, sortOrder, null, true);
return indexSearchService.search(searchTerm, pageSize, pageIndex, sortOrder, null, true);

}

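For orientation, a minimal caller-side sketch of the committed-read pattern introduced above (not part of this commit): save() only registers an 'index.add' eventLog entry via addDocumentToIndex(), and the finder methods flush the eventLog through indexUpdateService.updateIndex() before querying the index. Item values, the search term, and the field name documentService are illustrative.

import java.util.List;

import javax.inject.Inject;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.DocumentService;
import org.imixs.workflow.exceptions.AccessDeniedException;
import org.imixs.workflow.exceptions.QueryException;

public class IndexUsageSketch {

    @Inject
    private DocumentService documentService;

    public void saveAndSearch() throws AccessDeniedException, QueryException {
        ItemCollection workitem = new ItemCollection();
        workitem.replaceItemValue("type", "workitem");
        workitem.replaceItemValue("_subject", "Invoice 4711");

        // save() persists the JPA document and creates an 'index.add' eventLog entry;
        // the search index itself is not touched within the running transaction
        workitem = documentService.save(workitem);

        // find() first flushes the eventLog (indexUpdateService.updateIndex()) and then
        // queries the index, so only committed documents become visible to the search
        List<ItemCollection> hits = documentService.find("(_subject:\"Invoice 4711\")", 10, 0, null, false);
    }
}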
@@ -29,7 +29,6 @@
import javax.annotation.Resource;
import javax.annotation.security.DeclareRoles;
import javax.annotation.security.RunAs;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.SessionContext;
import javax.ejb.Stateless;
@@ -81,7 +80,7 @@
public class AdminPService {

public static final String JOB_RENAME_USER = "RENAME_USER";
public static final String JOB_REBUILD_LUCENE_INDEX = "REBUILD_LUCENE_INDEX";
public static final String JOB_REBUILD_INDEX = "JOB_REBUILD_INDEX";
public static final String JOB_UPGRADE = "UPGRADE";
public static final String JOB_MIGRATION = "MIGRATION";
private static final int DEFAULT_INTERVAL = 1;
@@ -101,6 +100,8 @@ public class AdminPService {
@Inject
JobHandlerRenameUser jobHandlerRenameUser;

@Inject
JobHandlerRebuildIndex jobHandlerRebuildIndex;

@Inject
@Any
@@ -226,6 +227,10 @@ public void scheduleTimer(javax.ejb.Timer timer) {
if (job.equals(JOB_UPGRADE)) {
jobHandler = jobHandlerUpgradeWorkitems;
}

if (job.equals(JOB_REBUILD_INDEX) || job.equals("REBUILD_LUCENE_INDEX")) {
jobHandler = jobHandlerRebuildIndex;
}

if (jobHandler == null) {
// try to find the jobHandler by CDI .....
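A hypothetical sketch of how the new job type could be triggered: a job document whose 'job' item carries JOB_REBUILD_INDEX is routed to jobHandlerRebuildIndex by the dispatch added above, and the legacy value "REBUILD_LUCENE_INDEX" is still accepted. The item name 'job', the createJob() entry point, and the import paths follow the usual AdminP conventions and are assumptions, not confirmed by this commit.

import javax.inject.Inject;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.adminp.AdminPService;
import org.imixs.workflow.exceptions.AccessDeniedException;

public class RebuildIndexJobSketch {

    @Inject
    private AdminPService adminPService;

    public void startRebuild() throws AccessDeniedException {
        ItemCollection job = new ItemCollection();
        // selects jobHandlerRebuildIndex; "REBUILD_LUCENE_INDEX" would still work for older clients
        job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX);
        // assumed AdminPService entry point for scheduling the job timer
        adminPService.createJob(job);
    }
}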
@@ -20,8 +20,8 @@

import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.index.UpdateService;
import org.imixs.workflow.engine.jpa.Document;
import org.imixs.workflow.engine.lucene.LuceneIndexService;
import org.imixs.workflow.exceptions.AccessDeniedException;
import org.imixs.workflow.exceptions.InvalidAccessException;
import org.imixs.workflow.exceptions.PluginException;
@@ -62,7 +62,7 @@ public class JobHandlerRebuildIndex implements JobHandler {
private EntityManager manager;

@Inject
LuceneIndexService luceneIndexService;
UpdateService updateService;

private static Logger logger = Logger.getLogger(JobHandlerRebuildIndex.class.getName());

@@ -91,9 +91,9 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
int blockCount = 0;

// read blocksize and timeout....
logger.info("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID()
logger.info("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID()
+ ") - lucene.rebuild.block_size=" + block_size);
logger.info("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID()
logger.info("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID()
+ ") - lucene.rebuild.time_out=" + time_out);

try {
Expand All @@ -116,7 +116,7 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
}

// update the index
luceneIndexService.updateDocumentsUncommitted(resultList);
updateService.updateIndex(resultList);
manager.flush();

// update count
@@ -127,7 +127,7 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
if (time == 0) {
time = 1;
}
logger.info("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID()
logger.info("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID()
+ ") - ..." + totalCount + " documents indexed in " + time + " sec. ... ");
blockCount = 0;
}
@@ -143,7 +143,7 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
time = 1;
}
if (time > time_out) { // suspend after 2 minutes (default 120)....
logger.info("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID()
logger.info("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID()
+ ") - suspended: " + totalCount + " documents indexed in " + time + " sec. ");

adminp.replaceItemValue("_syncpoint", syncPoint);
@@ -156,7 +156,7 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
}
} catch (Exception e) {
// print exception and stop job
logger.severe("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID()
logger.severe("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID()
+ ") - failed - " + e.getMessage() + " last syncpoint " + syncPoint + " - " + totalCount
+ " documents reindexed....");
e.printStackTrace();
@@ -177,7 +177,7 @@ public ItemCollection run(ItemCollection adminp) throws AdminPException {
if (time == 0) {
time = 1;
}
logger.info("...Job " + AdminPService.JOB_REBUILD_LUCENE_INDEX + " (" + adminp.getUniqueID() + ") - Finished: "
logger.info("...Job " + AdminPService.JOB_REBUILD_INDEX + " (" + adminp.getUniqueID() + ") - Finished: "
+ totalCount + " documents indexed in " + time + " sec. ");

adminp.replaceItemValue(JobHandler.ISCOMPLETED, true);
@@ -58,7 +58,7 @@ public interface SearchService {
* @return collection of search result
* @throws QueryException
*/
public List<ItemCollection> search(String sSearchTerm) throws QueryException;
//public List<ItemCollection> search(String sSearchTerm) throws QueryException;

/**
* Returns a collection of documents matching the provided search term. The
@@ -73,7 +73,7 @@
* @return collection of search result
* @throws QueryException
*/
public List<ItemCollection> search(String sSearchTerm, int pageSize, int pageIndex) throws QueryException;
//public List<ItemCollection> search(String sSearchTerm, int pageSize, int pageIndex) throws QueryException;

/**
* Returns a collection of documents matching the provided search term. The term
Expand All @@ -88,8 +88,8 @@ public interface SearchService {
* <p>
*
*/
public List<ItemCollection> search(String sSearchTerm, int pageSize, int pageIndex, SortOrder sortOrder,
DefaultOperator defaultOperator) throws QueryException;
// public List<ItemCollection> search(String sSearchTerm, int pageSize, int pageIndex, SortOrder sortOrder,
// DefaultOperator defaultOperator) throws QueryException;

/**
* Returns a collection of documents matching the provided search term. The term
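For clarity, a brief sketch of the single search() signature that remains in use after this refactoring (the narrower overloads above are retired); the injected field name and the query are illustrative, and the final boolean selects stub loading as used by DocumentService.findStubs().

import java.util.List;

import javax.inject.Inject;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.index.SearchService;
import org.imixs.workflow.exceptions.QueryException;

public class StubSearchSketch {

    @Inject
    private SearchService indexSearchService;

    public List<ItemCollection> loadStubs() throws QueryException {
        // pageSize 30, pageIndex 0, default sort order and default operator (null);
        // the last parameter 'true' returns document stubs instead of full documents
        return indexSearchService.search("(type:\"workitem\")", 30, 0, null, null, true);
    }
}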
@@ -28,19 +28,23 @@
package org.imixs.workflow.engine.index;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import javax.ejb.Local;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.exceptions.IndexException;
import org.imixs.workflow.exceptions.PluginException;

/**
* The UpdateService provides methods to write Imixs Workitems into a search
 * index. An ItemCollection can be added into the index by calling the method
* <code>updateDocument()</code>
* The UpdateService defines methods to update the search index. These methods
 * are called by the DocumentService.
* <p>
* The method updateIndex(documents) writes documents immediately into
* the index.
* <p>
* The method updateIndex() updates the search index based on the eventLog.
* <p>
 * The UpdateService also provides the default index schema.
*
* @see SchemaService
* @version 1.0
@@ -61,46 +65,26 @@ public interface UpdateService {
"$lasteventdate", "$creator", "$editor", "$lasteditor", "$owner", "namowner");

/**
 * This method adds a single document to the Lucene index. Before the
* document is added to the index, a new eventLog is created. The document will
* be indexed after the method flushEventLog is called. This method is called by
* the LuceneSearchService finder methods.
* This method adds a collection of documents to the index. The documents are
* added immediately to the index. Calling this method within a running
 * transaction leads to uncommitted reads in the index. For transaction
 * control, it is recommended to use instead the method
* documentService.addDocumentToIndex() which takes care of uncommitted reads.
* <p>
* The method supports committed read. This means that a running transaction
* will not read an uncommitted document from the Lucene index.
* This method is used by the JobHandlerRebuildIndex only.
*
*
* @param documentContext
*/
public void updateDocument(ItemCollection documentContext);

/**
* This method adds a collection of documents to the Lucene index. For each
* document in a given selection a new eventLog is created. The documents will
* be indexed after the method flushEventLog is called. This method is called by
* the LuceneSearchService finder methods.
* <p>
* The method supports committed read. This means that a running transaction
* will not read uncommitted documents from the Lucene index.
*
* @see updateDocumentsUncommitted
* @param documents
* to be indexed
* of ItemCollections to be indexed
* @throws IndexException
*/
public void updateDocuments(Collection<ItemCollection> documents);
public void updateIndex(List<ItemCollection> documents);

/**
* This method adds a new eventLog for a document to be deleted from the index.
 * The document will be removed from the index after the method flushEventLog
* is called. This method is called by the LuceneSearchService finder method
* only.
* This method updates the search index based on the eventLog. Documents are
* added by the DocumentService as events to the EventLogService. This ensures
* that only committed documents are added into the index.
*
*
* @param uniqueID
* of the workitem to be removed
* @throws PluginException
* @see DocumentService
*/
public void removeDocument(String uniqueID);

public void updateIndex();
}
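A short caller-side sketch (not part of this commit) contrasting the two update paths defined by this interface; the injected field names are illustrative.

import java.util.List;

import javax.inject.Inject;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.DocumentService;
import org.imixs.workflow.engine.index.UpdateService;
import org.imixs.workflow.exceptions.AccessDeniedException;

public class UpdateServiceUsageSketch {

    @Inject
    private DocumentService documentService;

    @Inject
    private UpdateService indexUpdateService;

    // (1) transaction-safe path: save() only registers an 'index.add' eventLog entry;
    //     the index is written when the eventLog is flushed via updateIndex()
    public void committedUpdate(ItemCollection workitem) throws AccessDeniedException {
        documentService.save(workitem);
        indexUpdateService.updateIndex();
    }

    // (2) immediate path, used by the JobHandlerRebuildIndex only: the given documents
    //     are written directly to the index, which may expose uncommitted reads
    public void directUpdate(List<ItemCollection> documents) {
        indexUpdateService.updateIndex(documents);
    }
}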
