Commit

rebuild index method added
Issue #554
rsoika committed Aug 31, 2019
1 parent 2218264 commit 37bba0b
Showing 5 changed files with 308 additions and 112 deletions.
@@ -195,18 +195,41 @@ void init() {

}

/**
* Returns the field list defining the default content of the schema. The values
* of those items are only searchable by fulltext search.
*
* @return the default field list of the schema
*/
public List<String> getFieldList() {
return fieldList;
}

/**
* Returns the analyzed field list of the schema. The values of those items are
* searchable by a field search. The values are analyzed.
*
* @return the analyzed field list of the schema
*/
public List<String> getFieldListAnalyse() {
return fieldListAnalyse;
}

/**
* Returns the no-analyze field list of the schema. The values of those items
* are searchable by field search. The values are not analyzed.
*
* @return the no-analyze field list of the schema
*/
public List<String> getFieldListNoAnalyse() {
return fieldListNoAnalyse;
}

/**
* Returns the field list of items stored in the index.
*
* @return the stored field list of the schema
*/
public List<String> getFieldListStore() {
return fieldListStore;
}
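
/*
 * Illustration only (not part of this commit): a minimal helper, written as if it
 * were a member of the same class, showing how the four field lists above are
 * meant to be read. An item may appear in more than one list; this simply
 * returns the first match.
 */
public String describeItem(String itemName) {
    if (fieldListAnalyse.contains(itemName)) {
        return itemName + ": field search, analyzed";
    }
    if (fieldListNoAnalyse.contains(itemName)) {
        return itemName + ": field search, not analyzed";
    }
    if (fieldListStore.contains(itemName)) {
        return itemName + ": value stored in the index";
    }
    if (fieldList.contains(itemName)) {
        return itemName + ": fulltext search only";
    }
    return itemName + ": not part of the index schema";
}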
@@ -194,6 +194,81 @@ public boolean flushEventLog(int junkSize) {
return true;
}

/**
* This method forces an update of the full text index. The method also creates
* the index directory if it does not yet exist.
*/
public void rebuildIndex(Directory indexDir) throws IOException {
// create an IndexWriter instance to make sure the index directory has been
// created...
IndexWriterConfig indexWriterConfig;
indexWriterConfig = new IndexWriterConfig(new ClassicAnalyzer());
indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
IndexWriter indexWriter = new IndexWriter(indexDir, indexWriterConfig);
indexWriter.close();
// now starting index job....
logger.info("...rebuild lucene index job created...");
ItemCollection job = new ItemCollection();
job.replaceItemValue("numinterval", 2); // 2 minutes
job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX);
adminPService.createJob(job);
}
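
/*
 * Illustration only (not part of this commit): a possible no-argument convenience
 * variant that resolves the configured index directory itself and delegates to
 * rebuildIndex(Directory) above. It reuses the luceneIndexDir field and the
 * FSDirectory/Paths imports already used by createIndexWriter() in this class.
 */
public void rebuildIndex() throws IOException {
    Directory indexDir = FSDirectory.open(Paths.get(luceneIndexDir));
    rebuildIndex(indexDir);
}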


/**
* This method adds a collection of documents to the Lucene index. The documents
* are added immediately to the index. Calling this method within a running
* transaction leads to uncommitted reads in the index. For transaction
* control, it is recommended to use the method updateDocuments() instead,
* which takes care of uncommitted reads.
* <p>
* This method is used by the JobHandlerRebuildIndex only.
*
* @param documents
* collection of ItemCollections to be indexed
* @throws IndexException
*/
public void updateDocumentsUncommitted(Collection<ItemCollection> documents) {

IndexWriter awriter = null;
long ltime = System.currentTimeMillis();
try {
awriter = createIndexWriter();
// add workitem to search index....
for (ItemCollection workitem : documents) {

if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
// create term
Term term = new Term("$uniqueid", workitem.getItemValueString("$uniqueid"));
logger.finest("......lucene add/update uncommitted workitem '"
+ workitem.getItemValueString(WorkflowKernel.UNIQUEID) + "' to index...");
awriter.updateDocument(term, createDocument(workitem));
}
}
} catch (IOException luceneEx) {
logger.warning("lucene error: " + luceneEx.getMessage());
throw new IndexException(IndexException.INVALID_INDEX, "Unable to update lucene search index", luceneEx);
} finally {
// close writer!
if (awriter != null) {
logger.finest("......lucene close IndexWriter...");
try {
awriter.close();
} catch (CorruptIndexException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
} catch (IOException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
}
}
}

if (logger.isLoggable(Level.FINE)) {
logger.fine("... update index block in " + (System.currentTimeMillis() - ltime) + " ms (" + documents.size()
+ " workitems total)");
}
}
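
/*
 * Illustration only (not part of this commit): the block-wise usage described in
 * the javadoc above, written as if it were part of the same class. The block size
 * of 100 is an assumption; java.util.List and java.util.ArrayList are assumed to
 * be imported.
 */
protected void indexInBlocks(List<ItemCollection> allWorkitems) {
    final int blockSize = 100; // assumed block size
    List<ItemCollection> block = new ArrayList<>(blockSize);
    for (ItemCollection workitem : allWorkitems) {
        block.add(workitem);
        if (block.size() == blockSize) {
            // documents become visible to index readers immediately (uncommitted reads)
            updateDocumentsUncommitted(block);
            block.clear();
        }
    }
    if (!block.isEmpty()) {
        updateDocumentsUncommitted(block);
    }
}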


/**
* This method flushes a given count of eventLogEntries. The method returns true
* if no more eventLogEntries exist.
@@ -202,7 +277,7 @@ public boolean flushEventLog(int junkSize) {
* the maximum number of eventLog entries to remove.
* @return true if the cache was totally flushed.
*/
private boolean flushEventLogByCount(int count) {
protected boolean flushEventLogByCount(int count) {
Date lastEventDate = null;
boolean cacheIsEmpty = true;
IndexWriter indexWriter = null;
@@ -298,7 +373,7 @@ private boolean flushEventLogByCount(int count) {
* @return
*/
@SuppressWarnings("unchecked")
public Document createDocument(ItemCollection aworkitem) {
protected Document createDocument(ItemCollection aworkitem) {
String sValue = null;
Document doc = new Document();
// combine all search fields from the search field list into one field
@@ -392,7 +467,7 @@ public Document createDocument(ItemCollection aworkitem) {
* @param store
* indicates if the value will become part of the Lucene document
*/
private void addItemValues(final Document doc, final ItemCollection workitem, final String _itemName,
protected void addItemValues(final Document doc, final ItemCollection workitem, final String _itemName,
final boolean analyzeValue, final boolean store) {

String itemName = _itemName;
@@ -438,27 +513,6 @@ private void addItemValues(final Document doc, final ItemCollection workitem, fi



/**
* This method forces an update of the full text index. The method also creates
* the index directory if it does not yet exist.
*/
public void rebuildIndex(Directory indexDir) throws IOException {
// create an IndexWriter instance to make sure the index directory has been
// created...
IndexWriterConfig indexWriterConfig;
indexWriterConfig = new IndexWriterConfig(new ClassicAnalyzer());
indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
IndexWriter indexWriter = new IndexWriter(indexDir, indexWriterConfig);
indexWriter.close();
// now starting index job....
logger.info("...rebuild lucene index job created...");
ItemCollection job = new ItemCollection();
job.replaceItemValue("numinterval", 2); // 2 minutes
job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX);
adminPService.createJob(job);
}


/**
* This method creates a new instance of a lucene IndexWriter.
*
@@ -468,7 +522,7 @@ public void rebuildIndex(Directory indexDir) throws IOException {
* @return
* @throws IOException
*/
public IndexWriter createIndexWriter() throws IOException {
protected IndexWriter createIndexWriter() throws IOException {
// create an IndexWriter instance
Directory indexDir = FSDirectory.open(Paths.get(luceneIndexDir));
// verify existence of index directory...
@@ -488,57 +542,4 @@ public IndexWriter createIndexWriter() throws IOException {

return new IndexWriter(indexDir, indexWriterConfig);
}
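
/*
 * Illustration only (not part of this commit): updating a single workitem with the
 * factory above, using try-with-resources instead of an explicit finally block.
 * It relies only on methods shown in this diff (createIndexWriter, createDocument)
 * and closes the writer automatically, even if updateDocument() throws.
 */
protected void updateSingleDocument(ItemCollection workitem) throws IOException {
    try (IndexWriter writer = createIndexWriter()) {
        Term term = new Term("$uniqueid", workitem.getItemValueString("$uniqueid"));
        writer.updateDocument(term, createDocument(workitem));
    }
}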

/**
* This method adds a collection of documents to the Lucene index. The documents
* are added immediately to the index. Calling this method within a running
* transaction leads to uncommitted reads in the index. For transaction
* control, it is recommended to use the method updateDocuments() instead,
* which takes care of uncommitted reads.
* <p>
* This method is used by the JobHandlerRebuildIndex only.
*
* @param documents
* collection of ItemCollections to be indexed
* @throws IndexException
*/
public void updateDocumentsUncommitted(Collection<ItemCollection> documents) {

IndexWriter awriter = null;
long ltime = System.currentTimeMillis();
try {
awriter = createIndexWriter();
// add workitem to search index....
for (ItemCollection workitem : documents) {

if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
// create term
Term term = new Term("$uniqueid", workitem.getItemValueString("$uniqueid"));
logger.finest("......lucene add/update uncommitted workitem '"
+ workitem.getItemValueString(WorkflowKernel.UNIQUEID) + "' to index...");
awriter.updateDocument(term, createDocument(workitem));
}
}
} catch (IOException luceneEx) {
logger.warning("lucene error: " + luceneEx.getMessage());
throw new IndexException(IndexException.INVALID_INDEX, "Unable to update lucene search index", luceneEx);
} finally {
// close writer!
if (awriter != null) {
logger.finest("......lucene close IndexWriter...");
try {
awriter.close();
} catch (CorruptIndexException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
} catch (IOException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
}
}
}

if (logger.isLoggable(Level.FINE)) {
logger.fine("... update index block in " + (System.currentTimeMillis() - ltime) + " ms (" + documents.size()
+ " workitems total)");
}
}
}