diff --git a/imixs-workflow-engine/src/main/java/org/imixs/workflow/engine/index/SchemaService.java b/imixs-workflow-engine/src/main/java/org/imixs/workflow/engine/index/SchemaService.java
index 05e61b4d9..2a019c527 100644
--- a/imixs-workflow-engine/src/main/java/org/imixs/workflow/engine/index/SchemaService.java
+++ b/imixs-workflow-engine/src/main/java/org/imixs/workflow/engine/index/SchemaService.java
@@ -195,18 +195,41 @@ void init() {
     }
 
+    /**
+     * Returns the field list defining the default content of the schema. The
+     * values of those items are only searchable by fulltext search.
+     *
+     * @return
+     */
     public List<String> getFieldList() {
         return fieldList;
     }
 
+    /**
+     * Returns the analyzed field list of the schema. The values of those items
+     * are searchable by a field search. The values are analyzed.
+     *
+     * @return
+     */
     public List<String> getFieldListAnalyse() {
         return fieldListAnalyse;
     }
 
+    /**
+     * Returns the no-analyze field list of the schema. The values of those items
+     * are searchable by a field search. The values are not analyzed.
+     *
+     * @return
+     */
     public List<String> getFieldListNoAnalyse() {
         return fieldListNoAnalyse;
     }
 
+    /**
+     * Returns the field list of items stored in the index.
+     *
+     * @return
+     */
     public List<String> getFieldListStore() {
         return fieldListStore;
     }
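Reviewer note: the four getters documented above form the contract between the engine core and the index implementations. A minimal sketch of a consumer, assuming a CDI environment; the `ExampleIndexer` class and its two private helpers are hypothetical and for illustration only:

```java
import java.util.List;

import javax.inject.Inject;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.index.SchemaService;

public class ExampleIndexer {

    @Inject
    private SchemaService schemaService;

    /** Decides per schema category how a workitem item is written into the index. */
    public void indexWorkitem(ItemCollection workitem) {
        // default content items: only searchable via the fulltext field
        for (String item : schemaService.getFieldList()) {
            appendToFulltext(workitem, item);
        }
        // analyzed items: tokenized, support fulltext-style field searches
        for (String item : schemaService.getFieldListAnalyse()) {
            addIndexField(workitem, item, true, schemaService.getFieldListStore().contains(item));
        }
        // no-analyze items: indexed verbatim for exact field searches
        for (String item : schemaService.getFieldListNoAnalyse()) {
            addIndexField(workitem, item, false, schemaService.getFieldListStore().contains(item));
        }
    }

    private void appendToFulltext(ItemCollection workitem, String item) {
        // hypothetical helper - collects all values into one fulltext field
    }

    private void addIndexField(ItemCollection workitem, String item, boolean analyze, boolean store) {
        // hypothetical helper - writes one index field per value
    }
}
```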
diff --git a/imixs-workflow-index-lucene/src/main/java/org/imixs/workflow/engine/lucene/LuceneIndexService.java b/imixs-workflow-index-lucene/src/main/java/org/imixs/workflow/engine/lucene/LuceneIndexService.java
index 0ca633d2f..83ef13fbf 100644
--- a/imixs-workflow-index-lucene/src/main/java/org/imixs/workflow/engine/lucene/LuceneIndexService.java
+++ b/imixs-workflow-index-lucene/src/main/java/org/imixs/workflow/engine/lucene/LuceneIndexService.java
@@ -194,6 +194,81 @@ public boolean flushEventLog(int junkSize) {
         return true;
     }
 
+    /**
+     * This method forces an update of the full text index. The method also creates
+     * the index directory if it does not yet exist.
+     */
+    public void rebuildIndex(Directory indexDir) throws IOException {
+        // create an IndexWriter instance to make sure the index directory
+        // exists..
+        IndexWriterConfig indexWriterConfig;
+        indexWriterConfig = new IndexWriterConfig(new ClassicAnalyzer());
+        indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
+        IndexWriter indexWriter = new IndexWriter(indexDir, indexWriterConfig);
+        indexWriter.close();
+        // now starting index job....
+        logger.info("...rebuild lucene index job created...");
+        ItemCollection job = new ItemCollection();
+        job.replaceItemValue("numinterval", 2); // 2 minutes
+        job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX);
+        adminPService.createJob(job);
+    }
+
+    /**
+     * This method adds a collection of documents to the Lucene index. The documents
+     * are added immediately to the index. Calling this method within a running
+     * transaction leads to uncommitted reads in the index. For transaction
+     * control, it is recommended to use the method updateDocuments() instead,
+     * which takes care of uncommitted reads.
+     * <br/>
+     * This method is used by the JobHandlerRebuildIndex only.
+     *
+     * @param documents
+     *            collection of ItemCollections to be indexed
+     * @throws IndexException
+     */
+    public void updateDocumentsUncommitted(Collection<ItemCollection> documents) {
+
+        IndexWriter awriter = null;
+        long ltime = System.currentTimeMillis();
+        try {
+            awriter = createIndexWriter();
+            // add workitem to search index....
+            for (ItemCollection workitem : documents) {
+
+                if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
+                    // create term
+                    Term term = new Term("$uniqueid", workitem.getItemValueString("$uniqueid"));
+                    logger.finest("......lucene add/update uncommitted workitem '"
+                            + workitem.getItemValueString(WorkflowKernel.UNIQUEID) + "' to index...");
+                    awriter.updateDocument(term, createDocument(workitem));
+                }
+            }
+        } catch (IOException luceneEx) {
+            logger.warning("lucene error: " + luceneEx.getMessage());
+            throw new IndexException(IndexException.INVALID_INDEX, "Unable to update lucene search index", luceneEx);
+        } finally {
+            // close writer!
+            if (awriter != null) {
+                logger.finest("......lucene close IndexWriter...");
+                try {
+                    awriter.close();
+                } catch (CorruptIndexException e) {
+                    throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
+                } catch (IOException e) {
+                    throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
+                }
+            }
+        }
+
+        if (logger.isLoggable(Level.FINE)) {
+            logger.fine("... update index block in " + (System.currentTimeMillis() - ltime) + " ms (" + documents.size()
+                    + " workitems total)");
+        }
+    }
+
     /**
      * This method flushes a given count of eventLogEntries. The method return true
      * if no more eventLogEntries exist.
@@ -202,7 +277,7 @@ public boolean flushEventLog(int junkSize) {
      *            the max size of a eventLog engries to remove.
      * @return true if the cache was totally flushed.
      */
-    private boolean flushEventLogByCount(int count) {
+    protected boolean flushEventLogByCount(int count) {
         Date lastEventDate = null;
         boolean cacheIsEmpty = true;
         IndexWriter indexWriter = null;
@@ -298,7 +373,7 @@ private boolean flushEventLogByCount(int count) {
      * @return
      */
     @SuppressWarnings("unchecked")
-    public Document createDocument(ItemCollection aworkitem) {
+    protected Document createDocument(ItemCollection aworkitem) {
         String sValue = null;
         Document doc = new Document();
         // combine all search fields from the search field list into one field
@@ -392,7 +467,7 @@ public Document createDocument(ItemCollection aworkitem) {
      * @param store
      *            indicates if the value will become part of the Lucene document
      */
-    private void addItemValues(final Document doc, final ItemCollection workitem, final String _itemName,
+    protected void addItemValues(final Document doc, final ItemCollection workitem, final String _itemName,
            final boolean analyzeValue, final boolean store) {
 
         String itemName = _itemName;
@@ -438,27 +513,6 @@ private void addItemValues(final Document doc, final ItemCollection workitem, fi
-
-
-    /**
-     * This method forces an update of the full text index. The method also creates
-     * the index directory if it does not yet exist.
-     */
-    public void rebuildIndex(Directory indexDir) throws IOException {
-        // create a IndexWriter Instance to make sure we have created the index
-        // directory..
-        IndexWriterConfig indexWriterConfig;
-        indexWriterConfig = new IndexWriterConfig(new ClassicAnalyzer());
-        indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
-        IndexWriter indexWriter = new IndexWriter(indexDir, indexWriterConfig);
-        indexWriter.close();
-        // now starting index job....
-        logger.info("...rebuild lucene index job created...");
-        ItemCollection job = new ItemCollection();
-        job.replaceItemValue("numinterval", 2); // 2 minutes
-        job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX);
-        adminPService.createJob(job);
-    }
-
-
     /**
      * This method creates a new instance of a lucene IndexWriter.
      *
@@ -468,7 +522,7 @@ public void rebuildIndex(Directory indexDir) throws IOException {
      * @return
      * @throws IOException
      */
-    public IndexWriter createIndexWriter() throws IOException {
+    protected IndexWriter createIndexWriter() throws IOException {
         // create a IndexWriter Instance
         Directory indexDir = FSDirectory.open(Paths.get(luceneIndexDir));
         // verify existence of index directory...
@@ -488,57 +542,4 @@ public IndexWriter createIndexWriter() throws IOException {
         return new IndexWriter(indexDir, indexWriterConfig);
     }
 
-    /**
-     * This method adds a collection of documents to the Lucene index. The documents
-     * are added immediately to the index. Calling this method within a running
-     * transaction leads to a uncommitted reads in the index. For transaction
-     * control, it is recommended to use instead the the method updateDocumetns()
-     * which takes care of uncommitted reads.
-     * <br/>
-     * This method is used by the JobHandlerRebuildIndex only.
-     *
-     * @param documents
-     *            of ItemCollections to be indexed
-     * @throws IndexException
-     */
-    public void updateDocumentsUncommitted(Collection<ItemCollection> documents) {
-
-        IndexWriter awriter = null;
-        long ltime = System.currentTimeMillis();
-        try {
-            awriter = createIndexWriter();
-            // add workitem to search index....
-            for (ItemCollection workitem : documents) {
-
-                if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
-                    // create term
-                    Term term = new Term("$uniqueid", workitem.getItemValueString("$uniqueid"));
-                    logger.finest("......lucene add/update uncommitted workitem '"
-                            + workitem.getItemValueString(WorkflowKernel.UNIQUEID) + "' to index...");
-                    awriter.updateDocument(term, createDocument(workitem));
-                }
-            }
-        } catch (IOException luceneEx) {
-            logger.warning("lucene error: " + luceneEx.getMessage());
-            throw new IndexException(IndexException.INVALID_INDEX, "Unable to update lucene search index", luceneEx);
-        } finally {
-            // close writer!
-            if (awriter != null) {
-                logger.finest("......lucene close IndexWriter...");
-                try {
-                    awriter.close();
-                } catch (CorruptIndexException e) {
-                    throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
-                } catch (IOException e) {
-                    throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
-                }
-            }
-        }
-
-        if (logger.isLoggable(Level.FINE)) {
-            logger.fine("... update index block in " + (System.currentTimeMillis() - ltime) + " ms (" + documents.size()
-                    + " workitems total)");
-        }
-    }
 }
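Reviewer note: `rebuildIndex()` schedules an AdminP job, and that job feeds all documents back through `updateDocumentsUncommitted()`, which writes through a fresh `IndexWriter` instead of the transactional event log. A hedged sketch of such a rebuild loop; the `ExampleRebuild` class and its paging helper are hypothetical, only `updateDocumentsUncommitted()` comes from this patch:

```java
import java.util.ArrayList;
import java.util.List;

import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.engine.lucene.LuceneIndexService;

public class ExampleRebuild {

    private LuceneIndexService luceneIndexService; // assumed to be injected

    public void rebuild() {
        int offset = 0;
        List<ItemCollection> batch = loadNextBatch(offset);
        while (!batch.isEmpty()) {
            // documents become visible to index readers immediately,
            // before the surrounding database transaction commits
            luceneIndexService.updateDocumentsUncommitted(batch);
            offset += batch.size();
            batch = loadNextBatch(offset);
        }
    }

    private List<ItemCollection> loadNextBatch(int offset) {
        // hypothetical paging helper - loads the next block of workitems
        return new ArrayList<>();
    }
}
```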
if (!"{}".equals(schemaUpdate)) { String uri = host + "/api/cores/" + core + "/schema"; logger.info("...update schema '" + core + "':"); logger.info("..." + schemaUpdate); restClient.post(uri, schemaUpdate, "application/json"); + + // force rebuild index + rebuildIndex(); } else { logger.info("...schema = OK "); } } + /** + * This method adds a collection of documents to the Lucene solr index. The + * documents are added immediately to the index. Calling this method within a + * running transaction leads to a uncommitted reads in the index. For + * transaction control, it is recommended to use instead the the method + * updateDocumetns() which takes care of uncommitted reads. + *

+ * This method is used by the JobHandlerRebuildIndex only. + * + * @param documents + * of ItemCollections to be indexed + * @throws RestAPIException + * @throws IndexException + */ + public void updateDocumentsUncommitted(List documents) throws RestAPIException { + long ltime = System.currentTimeMillis(); + if (documents == null || documents.size() == 0) { + // no op! + return; + } else { + + String xmlRequest = createAddDocumentsXMLRequest(documents); + String uri = host + "/solr/" + core + "/update"; + logger.info("...update documents '" + core + "':"); + restClient.post(uri, xmlRequest, "text/xml"); + } + if (logger.isLoggable(Level.FINE)) { + logger.fine("... update index block in " + (System.currentTimeMillis() - ltime) + " ms (" + documents.size() + + " workitems total)"); + } + } + /** - * This method returns a JSON structure to to update an existing Solr schema. - * The method adds all fields into a solr update definition that did not yet - * exist in the current schema. + * This method forces an update of the full text index. + */ + public void rebuildIndex() { + // now starting index job.... + logger.info("...rebuild lucene index job created..."); + ItemCollection job = new ItemCollection(); + job.replaceItemValue("numinterval", 2); // 2 minutes + job.replaceItemValue("job", AdminPService.JOB_REBUILD_INDEX); + adminPService.createJob(job); + } + + + /** + * This method returns a JSON structure to update an existing Solr schema. The + * method adds all fields into a solr update definition that did not yet exist + * in the current schema. *

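Reviewer note: forcing a rebuild after a non-empty schema delta keeps the index content consistent with newly added field definitions. For orientation, a sketch of the kind of JSON body `createUpdateSchemaJSONRequest()` assembles; the field entries are examples, the exact commands depend on the diff against the existing core schema, and the repeated `add-field` keys follow Solr's Schema API convention:

```java
public class ExampleSchemaUpdate {
    public static void main(String[] args) {
        // assumed example payload in the format of Solr's Schema API; the real
        // method posts its result to host + "/api/cores/" + core + "/schema"
        String schemaUpdate = "{"
                + "\"add-field\":{\"name\":\"content\",\"type\":\"text_general\",\"stored\":false},"
                + "\"add-field\":{\"name\":\"$uniqueid\",\"type\":\"string\",\"stored\":true}"
                + "}";
        System.out.println(schemaUpdate);
    }
}
```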
+    /**
+     * This method returns a JSON structure to update an existing Solr schema. The
+     * method adds all fields into a solr update definition that did not yet exist
+     * in the current schema.
      * <br/>
      * The param schema contains the current schema definition of the core.
      *
      * @return
      */
-    private String createUpdateSchema(String oldSchema) {
+    protected String createUpdateSchemaJSONRequest(String oldSchema) {
         StringBuffer updateSchema = new StringBuffer();
         List<String> fieldListStore = schemaService.getFieldListStore();
@@ -183,21 +242,21 @@ private String createUpdateSchema(String oldSchema) {
 
         updateSchema.append("{");
 
         // finally add the default content field
-        addFieldIntoUpdateSchema(updateSchema, oldSchema, "content", "text_general", false);
+        addFieldDefinitionToUpdateSchema(updateSchema, oldSchema, "content", "text_general", false);
 
         // add each field from the fieldListAnalyse
         for (String field : fieldListAnalyse) {
             boolean store = fieldListStore.contains(field);
-            addFieldIntoUpdateSchema(updateSchema, oldSchema, field, "text_general", store);
+            addFieldDefinitionToUpdateSchema(updateSchema, oldSchema, field, "text_general", store);
         }
 
         // add each field from the fieldListNoAnalyse
         for (String field : fieldListNoAnalyse) {
             boolean store = fieldListStore.contains(field);
-            addFieldIntoUpdateSchema(updateSchema, oldSchema, field, "strings", store);
+            addFieldDefinitionToUpdateSchema(updateSchema, oldSchema, field, "strings", store);
         }
 
         // finally add the $uniqueid field
-        addFieldIntoUpdateSchema(updateSchema, oldSchema, "$uniqueid", "string", true);
+        addFieldDefinitionToUpdateSchema(updateSchema, oldSchema, "$uniqueid", "string", true);
 
         // remove last ,
         int lastComma = updateSchema.lastIndexOf(",");
@@ -208,6 +267,77 @@
         return updateSchema.toString();
     }
 
+    /**
+     * This method returns an XML structure to add new documents into the solr
+     * index.
+     *
+     * @return xml content to update documents
+     */
+    protected String createAddDocumentsXMLRequest(List<ItemCollection> documents) {
+
+        List<String> fieldList = schemaService.getFieldList();
+        List<String> fieldListAnalyse = schemaService.getFieldListAnalyse();
+        List<String> fieldListNoAnalyse = schemaService.getFieldListNoAnalyse();
+        SimpleDateFormat dateformat = new SimpleDateFormat("yyyyMMddHHmmss");
+
+        StringBuffer xmlContent = new StringBuffer();
+
+        xmlContent.append("<add>");
+
+        for (ItemCollection document : documents) {
+            xmlContent.append("<doc>");
+
+            // add all content fields defined in the schema
+            String content = "";
+            for (String field : fieldList) {
+                String sValue = "";
+                // check value list - skip empty fields
+                List vValues = document.getItemValue(field);
+                if (vValues.size() == 0)
+                    continue;
+                // get all values of a value list field
+                for (Object o : vValues) {
+                    if (o == null)
+                        // skip null values
+                        continue;
+
+                    if (o instanceof Calendar || o instanceof Date) {
+
+                        // convert calendar to string
+                        String sDateValue;
+                        if (o instanceof Calendar)
+                            sDateValue = dateformat.format(((Calendar) o).getTime());
+                        else
+                            sDateValue = dateformat.format((Date) o);
+                        sValue += sDateValue + ",";
+
+                    } else
+                        // simple string representation
+                        sValue += o.toString() + ",";
+                }
+                if (sValue != null) {
+                    content += sValue + ",";
+                }
+            }
+            logger.finest("......add index field content=" + content);
+            xmlContent.append("<field name=\"content\">" + content + "</field>");
+
+            // now add all analyzed fields...
+            for (String aFieldname : fieldListAnalyse) {
+                addFieldValuesToUpdateRequest(xmlContent, document, aFieldname);
+            }
+            // now add all no-analyze fields...
+            for (String aFieldname : fieldListNoAnalyse) {
+                addFieldValuesToUpdateRequest(xmlContent, document, aFieldname);
+            }
+
+            xmlContent.append("</doc>");
+        }
+
+        xmlContent.append("</add>");
+
+        return xmlContent.toString();
+    }
+
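Reviewer note: the `xmlContent.append(...)` calls above emit Solr's XML update format. For orientation, a sketch of the request body generated for a single workitem; the item names and values are illustrative, dates are rendered with the `yyyyMMddHHmmss` format, and the content field keeps one trailing comma per value by construction:

```java
public class ExampleAddRequest {
    public static void main(String[] args) {
        // illustrative output of createAddDocumentsXMLRequest() for one workitem;
        // the real request is posted to host + "/solr/" + core + "/update"
        String xmlRequest = "<add>"
                + "<doc>"
                + "<field name=\"content\">some fulltext value,20190416083000,</field>"
                + "<field name=\"$uniqueid\">1234-5678-90ab</field>"
                + "<field name=\"$modified\">20190416083000</field>"
                + "</doc>"
                + "</add>";
        System.out.println(xmlRequest);
    }
}
```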
     /**
      * This method adds a 'add-field' object to an updateSchema.
      * <br/>
@@ -234,7 +364,7 @@ private String createUpdateSchema(String oldSchema) {
      *            - true if a ',' should be added to the end of the updateSchema.
      *
      */
-    private void addFieldIntoUpdateSchema(StringBuffer updateSchema, String oldSchema, String name, String type,
+    private void addFieldDefinitionToUpdateSchema(StringBuffer updateSchema, String oldSchema, String name, String type,
             boolean store) {
         String fieldDefinition = "{\"name\":\"" + name + "\",\"type\":\"" + type + "\",\"stored\":" + store + "}";
 
@@ -245,4 +375,51 @@ private void addFieldIntoUpdateSchem
     }
 
+    /**
+     * This method adds a field value into an xml update request.
+     *
+     * @param xmlContent
+     *            the xml update request the values are added to
+     * @param workitem
+     *            the workitem containing the values
+     * @param _itemName
+     *            the item name inside the workitem
+     */
+    private void addFieldValuesToUpdateRequest(StringBuffer xmlContent, final ItemCollection workitem,
+            final String _itemName) {
+
+        SimpleDateFormat dateformat = new SimpleDateFormat("yyyyMMddHHmmss");
+
+        if (_itemName == null) {
+            return;
+        }
+
+        List vValues = workitem.getItemValue(_itemName);
+        if (vValues.size() == 0) {
+            return;
+        }
+        if (vValues.get(0) == null) {
+            return;
+        }
+
+        String itemName = _itemName.toLowerCase().trim();
+        for (Object singleValue : vValues) {
+            String convertedValue = "";
+            if (singleValue instanceof Calendar || singleValue instanceof Date) {
+                // convert calendar to lucene string representation
+                String sDateValue;
+                if (singleValue instanceof Calendar) {
+                    sDateValue = dateformat.format(((Calendar) singleValue).getTime());
+                } else {
+                    sDateValue = dateformat.format((Date) singleValue);
+                }
+                convertedValue = sDateValue;
+            } else {
+                // default
+                convertedValue = singleValue.toString();
+            }
+            xmlContent.append("<field name=\"" + itemName + "\">" + convertedValue + "</field>");
+        }
+
+    }
 }
diff --git a/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrSearchService.java b/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrSearchService.java
index 256de0487..ecbea5288 100644
--- a/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrSearchService.java
+++ b/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrSearchService.java
@@ -27,7 +27,6 @@
 
 package org.imixs.workflow.engine.solr;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
@@ -37,13 +36,11 @@
 import javax.ejb.Stateless;
 import javax.inject.Inject;
 
-import org.eclipse.microprofile.config.inject.ConfigProperty;
 import org.imixs.workflow.ItemCollection;
 import org.imixs.workflow.engine.DocumentService;
 import org.imixs.workflow.engine.index.DefaultOperator;
 import org.imixs.workflow.engine.index.SchemaService;
 import org.imixs.workflow.engine.index.SearchService;
-import org.imixs.workflow.exceptions.InvalidAccessException;
 import org.imixs.workflow.exceptions.QueryException;
 
 /**
diff --git a/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrUpdateService.java b/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrUpdateService.java
index 4dc5dadd7..16fa9185c 100644
--- a/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrUpdateService.java
+++ b/imixs-workflow-index-solr/src/main/java/org/imixs/workflow/engine/solr/SolrUpdateService.java
@@ -27,7 +27,6 @@
 
 package org.imixs.workflow.engine.solr;
 
-import java.util.Collection;
 import java.util.List;
 import java.util.logging.Logger;
 
@@ -36,17 +35,16 @@
 import javax.inject.Inject;
 
 import org.imixs.workflow.ItemCollection;
-import org.imixs.workflow.engine.EventLogService;
 import org.imixs.workflow.engine.index.UpdateService;
 import org.imixs.workflow.exceptions.IndexException;
-import org.imixs.workflow.exceptions.PluginException;
+import org.imixs.workflow.services.rest.RestAPIException;
 
 /**
- * The SolrUpdateService provides methods to write Imixs Workitems into a
- * Solr search index. With the method addWorkitem() a
- * ItemCollection can be added to a Solr search index. The service init method
- * reads the property file 'imixs.properties' from the current classpath to
- * determine the configuration.
+ * The SolrUpdateService provides methods to write Imixs Workitems into a Solr
+ * search index. With the method addWorkitem() an ItemCollection can
+ * be added to a Solr search index. The service init method reads the property
+ * file 'imixs.properties' from the current classpath to determine the
+ * configuration.
+ *
 * <br/>
 * <ul>
 * <li>The property "solr.core" defines the Solr core for the lucene index
@@ -59,9 +57,10 @@
 @Stateless
 public class SolrUpdateService implements UpdateService {
 
+    @Inject
+    SolrIndexService solrIndexService;
 
-
-    private static Logger logger = Logger.getLogger(SolrUpdateService.class.getName());
+    private static Logger logger = Logger.getLogger(SolrUpdateService.class.getName());
 
     /**
      * PostContruct event - The method loads the lucene index properties from the
@@ -73,7 +72,7 @@ public class SolrUpdateService implements UpdateService {
     void init() {
 
         logger.finest("...... ");
-
+
     }
 
     /**
@@ -87,24 +86,23 @@ void init() {
      *
      * @param documents
      *            of ItemCollections to be indexed
+     * @throws RestAPIException
      * @throws IndexException
      */
     @Override
     public void updateIndex(List<ItemCollection> documents) {
-
-        logger.warning(" unimplemented !!!!");
+        try {
+            solrIndexService.updateDocumentsUncommitted(documents);
+        } catch (RestAPIException e) {
+            logger.severe("Failed to update document collection: " + e.getMessage());
+            throw new IndexException(IndexException.INVALID_INDEX, "Unable to update solr search index", e);
+        }
     }
-
-
+
     @Override
     public void updateIndex() {
-
+        // TODO
         logger.warning(" unimplemented !!!!");
     }
-
-
-
-
-
 }