[idok-commit] idok commit r369 - in trunk: . java/ch/idok/common/config java/ch/idok/common/impl/repository/svn java/ch/idok/common/repository java/ch/idok/dmsd/config java/ch/idok/dmsd/impl/config/svnlucene java/ch/idok/dmsd/impl/controller java/ch/idok/dmsd/impl/extractor java/ch/idok/dmsd/impl/extractor/microsoft java/ch/idok/dmsd/impl/indexer/lucene java/ch/idok/dmsd/impl/loader java/ch/idok/dmsd/impl/queue java/ch/idok/dmsd/impl/updatelist java/ch/idok/dmsd/indexer java/ch/idok/dmsd/monitoring java/ch/idok/dmsd/pipeline java/ch/idok/service/server/search/lucene
- From: "AFS account Stadler Hans Christian" <stadler_h AT savannah.psi.ch>
- To: idok-commit AT lists.psi.ch
- Subject: [idok-commit] idok commit r369 - in trunk: . java/ch/idok/common/config java/ch/idok/common/impl/repository/svn java/ch/idok/common/repository java/ch/idok/dmsd/config java/ch/idok/dmsd/impl/config/svnlucene java/ch/idok/dmsd/impl/controller java/ch/idok/dmsd/impl/extractor java/ch/idok/dmsd/impl/extractor/microsoft java/ch/idok/dmsd/impl/indexer/lucene java/ch/idok/dmsd/impl/loader java/ch/idok/dmsd/impl/queue java/ch/idok/dmsd/impl/updatelist java/ch/idok/dmsd/indexer java/ch/idok/dmsd/monitoring java/ch/idok/dmsd/pipeline java/ch/idok/service/server/search/lucene
- Date: Tue, 23 Jun 2009 11:04:52 +0200
- List-archive: <https://lists.web.psi.ch/pipermail/idok-commit/>
- List-id: Commit emails of the iDok project <idok-commit.lists.psi.ch>
Author: stadler_h
Date: Tue Jun 23 11:04:51 2009
New Revision: 369
Log:
Merged branches/dmsd-threaded r356:368 into trunk
Added:
trunk/java/ch/idok/dmsd/impl/queue/EventPriority.java
- copied unchanged from r368, /branches/dmsd-threaded/java/ch/idok/dmsd/impl/queue/EventPriority.java
Modified:
trunk/java/ch/idok/common/config/Setup.java
trunk/java/ch/idok/common/impl/repository/svn/CachingSvnRepositoryManager.java
trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryId.java
trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryManager.java
trunk/java/ch/idok/common/repository/Repository.java
trunk/java/ch/idok/common/repository/RepositoryManager.java
trunk/java/ch/idok/dmsd/config/Config.java
trunk/java/ch/idok/dmsd/impl/config/svnlucene/Config.java
trunk/java/ch/idok/dmsd/impl/controller/SimpleController.java
trunk/java/ch/idok/dmsd/impl/extractor/Extractor.java
trunk/java/ch/idok/dmsd/impl/extractor/microsoft/MSOfficeExtractorFactory.java
trunk/java/ch/idok/dmsd/impl/indexer/lucene/Indexer.java
trunk/java/ch/idok/dmsd/impl/loader/ContentLoader.java
trunk/java/ch/idok/dmsd/impl/queue/DataEvent.java
trunk/java/ch/idok/dmsd/impl/queue/Event.java
trunk/java/ch/idok/dmsd/impl/queue/EventQueue.java
trunk/java/ch/idok/dmsd/impl/queue/SignalEvent.java
trunk/java/ch/idok/dmsd/impl/updatelist/SimpleUpdateListGenerator.java
trunk/java/ch/idok/dmsd/indexer/ContentExtractor.java
trunk/java/ch/idok/dmsd/indexer/ContentExtractorFactory.java
trunk/java/ch/idok/dmsd/monitoring/MonitoringHandler.java
trunk/java/ch/idok/dmsd/pipeline/PipelineController.java
trunk/java/ch/idok/dmsd/pipeline/PipelineQueue.java
trunk/java/ch/idok/dmsd/pipeline/PipelineStage.java
trunk/java/ch/idok/service/server/search/lucene/LuceneSearchService.java
trunk/local-server.xml
Modified: trunk/java/ch/idok/common/config/Setup.java
==============================================================================
--- trunk/java/ch/idok/common/config/Setup.java (original)
+++ trunk/java/ch/idok/common/config/Setup.java Tue Jun 23 11:04:51 2009
@@ -168,6 +168,8 @@
* WARNIG: This value can change when a new svnkit version is
* installed!
*
+ * This method must be thread safe!
+ *
* @return The meta data key for the version meta datum
*/
public String getVersionKey() {
@@ -177,6 +179,9 @@
/**
* @brief Retrieve the key for the searchable version metadatum
+ *
+ * This method must be thread safe!
+ *
* @return The meta data key for the searchable version meta datum
*/
public String getSearchableVersionKey() {
@@ -186,6 +191,9 @@
/**
* @brief Retrieve the key for the document id meta datum
+ *
+ * This method must be thread safe!
+ *
* @return The meta data key for the document id meta datum
*/
public String getDocIdKey() {
@@ -203,7 +211,10 @@
/**
* @brief Retrieve the key for the relative document path metadatum
*
- * Relative is relative to the repository root.
+ * Relative is relative to the repository root.
+ *
+ * This method must be thread safe!
+ *
* @return The meta data key for the relative document path meta datum
*/
public String getRelativePathKey() {
@@ -213,6 +224,9 @@
/**
* @brief Retrieve the key for the filesize metadatum
+ *
+ * This method must be thread safe!
+ *
* @return The meta data key for the filesize meta datum
*/
public String getFilesizeKey() {
@@ -221,6 +235,9 @@
/**
* @brief Original file name for compressed or archived documents.
+ *
+ * This method must be thread safe!
+ *
* @return The original file name of the document.
*/
public String getOriginalName() {
@@ -235,6 +252,8 @@
* mime type can be retrieved with the metadata key retrie- veable by
* this method.
*
+ * This method must be thread safe!
+ *
* @return The meta data key for the mime type meta datum.
*/
public String getMimeKey() {
@@ -243,6 +262,9 @@
/**
* @brief The meta data key for the original mime type meta datum.
+ *
+ * This method must be thread safe!
+ *
* @return The meta data key for the original mime type meta datum.
*/
public String getOriginalMimeKey() {
@@ -252,6 +274,8 @@
/**
* @brief The meta data key for the MD5 digest of the file content.
+ *
+ * This method must be thread safe!
*/
public String getChecksumKey() {
return System.getProperty("common.config.checksumKey",
@@ -260,6 +284,8 @@
/**
* @brief The meta data key for the MD5 digest of the file content.
+ *
+ * This method must be thread safe!
*/
public String getIndexingExceptionKey() {
return System.getProperty("common.config.indexingExceptionKey",
Modified: trunk/java/ch/idok/common/impl/repository/svn/CachingSvnRepositoryManager.java
==============================================================================
--- trunk/java/ch/idok/common/impl/repository/svn/CachingSvnRepositoryManager.java (original)
+++ trunk/java/ch/idok/common/impl/repository/svn/CachingSvnRepositoryManager.java Tue Jun 23 11:04:51 2009
@@ -26,6 +26,7 @@
import ch.idok.common.config.Setup;
import ch.idok.common.errorhandling.DmsException;
import ch.idok.common.errorhandling.ErrorType;
+import ch.idok.common.errorhandling.Util;
import ch.idok.common.repository.Repository;
import ch.idok.common.repository.RepositoryId;
@@ -57,6 +58,8 @@
* @brief Retrieve the repository object of the repository with the given
* id.
*
+ * This method must be thread safe!
+ *
* @param repositoryId
* The repository id.
* @throws DmsException
@@ -66,19 +69,22 @@
public Repository getRepository(RepositoryId repositoryId)
throws DmsException {
DmsException exception;
- try {
- Repository repository = repositoryCache.get(repositoryId);
- if (repository != null)
- return repository;
- Repository repo = super.getRepository(repositoryId);
- logger.finest("Added to repo cache: " + repositoryId);
- repositoryCache.put(repositoryId.copy(), repo);
- return repo;
+ try {
+ synchronized(repositoryCache) {
+ Repository repository = repositoryCache.get(repositoryId);
+ if (repository != null)
+ return repository;
+ Repository repo = super.getRepository(repositoryId);
+ logger.finest("Added to repo cache: " + repositoryId);
+ repositoryCache.put(repositoryId.copy(), repo);
+ return repo;
+ }
} catch (DmsException ex) {
exception = ex;
} catch (Throwable th) {
+ logger.severe("getRepository failed: "+th+"\n"+Util.getStackTrace(th));
exception = new DmsException(ErrorType.INTERNAL, this,
- "Internal error detected", "", th);
+ "Internal error detected", "CachingSvnRepositoryManager.getRepository() failed", th);
}
throw exception;
}
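
The hunk above serializes the cache lookup and the repository creation under one lock, so two indexer threads cannot both miss the cache and open the same repository twice. A minimal standalone sketch of that check-then-create-under-one-lock pattern (illustrative only; the class and method names below are not iDok code):

    import java.util.HashMap;
    import java.util.Map;

    public class SynchronizedCacheSketch {

        private final Map<String, Object> cache = new HashMap<String, Object>();

        public Object getOrCreate(String key) throws Exception {
            synchronized (cache) {
                Object cached = cache.get(key);
                if (cached != null)
                    return cached;                      // fast path: already cached
                Object created = openExpensively(key);  // slow path runs under the same lock
                cache.put(key, created);
                return created;
            }
        }

        // Hypothetical stand-in for the costly construction (e.g. opening a repository).
        private Object openExpensively(String key) throws Exception {
            return new Object();
        }
    }

Holding the lock across the creation keeps the cache consistent at the price of serializing slow creations, which is the trade-off the change above makes for repository objects.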
Modified: trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryId.java
==============================================================================
--- trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryId.java (original)
+++ trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryId.java Tue Jun 23 11:04:51 2009
@@ -123,6 +123,7 @@
* @throws DmsException
*/
private void init(URI uri) throws DmsException {
+ URI repoURI = null;
try {
// Parse user/pass from URI
String userInfo = uri.getUserInfo();
@@ -137,7 +138,7 @@
svnPassword = userInfo.substring(at + 1);
}
// Remove user/pass and normalise URI
- URI repoURI = URIUtil.normalizeURI(uri);
+ repoURI = URIUtil.normalizeURI(uri);
svnUrl = SVNURL.parseURIEncoded(repoURI.toASCIIString());
} catch (URISyntaxException e) {
@@ -145,7 +146,7 @@
"", e);
} catch (SVNException e) {
throw new DmsException(ErrorType.REPO_ID, this,
- "Invalid subversion URL", "", e);
+ "Invalid subversion URL", repoURI.toString(), e);
}
}
Modified: trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryManager.java
==============================================================================
--- trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryManager.java (original)
+++ trunk/java/ch/idok/common/impl/repository/svn/SvnRepositoryManager.java Tue Jun 23 11:04:51 2009
@@ -47,6 +47,7 @@
import ch.idok.common.config.Setup;
import ch.idok.common.errorhandling.DmsException;
import ch.idok.common.errorhandling.ErrorType;
+import ch.idok.common.errorhandling.Util;
import ch.idok.common.impl.repository.svn.kerberized.DmsHTTPConnectionFactory;
import ch.idok.common.impl.repository.svn.kerberized.SVNKerb5Authentication;
import ch.idok.common.repository.Repository;
@@ -85,6 +86,8 @@
/**
* Construct a RepositoryId refering to a Subversion repository whose root
* URI is <code>uri</code>.
+ *
+ * May be called concurrently.
*/
public RepositoryId getRepositoryId(URI uri) throws DmsException {
return new SvnRepositoryId(uri);
@@ -93,6 +96,8 @@
/**
* Construct a RepositoryId refering to a Subversion repository whose root
* URI is given by <code>uri</code> and <code>where</code>.
+ *
+ * May be called concurrently.
*/
public RepositoryId getRepositoryId(URI uri, LocationType where)
throws DmsException {
@@ -102,12 +107,14 @@
/**
* Retrieve the repository object of the repository with the given id.
*
+ * May be called concurrently.
+ *
* @param repositoryId
* The repository id.
* @throws DmsException
* If the repository cannot be accessed.
*/
- public Repository getRepository(RepositoryId repositoryId)
+ synchronized public Repository getRepository(RepositoryId repositoryId)
throws DmsException {
try {
logger_.fine("Opening " + repositoryId);
@@ -139,8 +146,9 @@
return new SvnRepository(svnRepo, repoId, handler_);
} catch (Throwable th) {
+ logger_.severe("getRepository failed: "+th+"\n"+Util.getStackTrace(th));
throw new DmsException(ErrorType.INTERNAL, this,
- "Internal error detected", "", th);
+ "Internal error detected", "SvnRepositoryManager.getRepository() failed", th);
}
}
Modified: trunk/java/ch/idok/common/repository/Repository.java
==============================================================================
--- trunk/java/ch/idok/common/repository/Repository.java (original)
+++ trunk/java/ch/idok/common/repository/Repository.java Tue Jun 23 11:04:51 2009
@@ -36,6 +36,8 @@
* list with changes to the repository between two repository versions. The
* repository must be able to provide the actual version and the version for the
* initial empty repository.
+ *
+ * The methods in this class may be called concurrently.
*/
public interface Repository {
Modified: trunk/java/ch/idok/common/repository/RepositoryManager.java
==============================================================================
--- trunk/java/ch/idok/common/repository/RepositoryManager.java (original)
+++ trunk/java/ch/idok/common/repository/RepositoryManager.java Tue Jun 23 11:04:51 2009
@@ -53,17 +53,23 @@
/**
* @brief Convert a repository identifier given as a URI to a RepositoryId
* object.
+ *
+ * May be called concurrently.
*/
RepositoryId getRepositoryId(URI uri) throws DmsException;
/**
* @brief Create RepositoryId object from URI and LocationType object.
+ *
+ * May be called concurrently.
*/
RepositoryId getRepositoryId(URI uri, LocationType where)
throws DmsException;
/**
* @brief Retrieve Repository object for the given repository id.
+ *
+ * May be called concurrently.
*/
Repository getRepository(RepositoryId repositoryId) throws DmsException;
}
Modified: trunk/java/ch/idok/dmsd/config/Config.java
==============================================================================
--- trunk/java/ch/idok/dmsd/config/Config.java (original)
+++ trunk/java/ch/idok/dmsd/config/Config.java Tue Jun 23 11:04:51 2009
@@ -94,6 +94,8 @@
* @brief Retrieve the controller object of the constructed document
* handling pipeline.
*
+ * This method may be called concurrently.
+ *
* @return The controller object of the constructed pipeline.
*
* @pre initialize() must be called first.
@@ -103,6 +105,8 @@
/**
* @brief Get a logger instance with the specified name.
*
+ * This method may be called concurrently.
+ *
* @param subSystemName
* The name of the logger. subSystemName should start with
* "dmsd." in order to inherit logger properties specified on
@@ -113,6 +117,8 @@
/**
* @brief Retrieve the monitoring handler.
*
+ * This method may be called concurrently.
+ *
* @pre initialize() must be called first.
*/
MonitoringHandler getMonitoringManager();
@@ -127,6 +133,8 @@
/**
* @brief Retrieve the repository manager.
*
+ * This method may be called concurrently.
+ *
* @pre initialize() must be called first.
*/
RepositoryManager getRepositoryManger() throws DmsException;
@@ -134,6 +142,8 @@
/**
* @brief Retrieve the last indexed version.
*
+ * This method may be called concurrently.
+ *
* @param repositoryId
* The repository for which the last indexed version should
* be retrieved.
@@ -148,6 +158,8 @@
*
* This routine will be called from the Indexer after accomplished work.
*
+ * This method may be called concurrently.
+ *
* @param repositoryId
* The repository for which the last indexed veersion is to
* be set.
@@ -175,6 +187,8 @@
/**
* @brief Retrieve content extractor.
*
+ * This method may be called concurrently.
+ *
* @param mimeType
* The mime type that the extractor should be able to handle.
* @param rawContent
@@ -191,6 +205,8 @@
/**
* @brief Check if content extractor exists for a given mime type.
*
+ * This method may be called concurrently.
+ *
* @param mimeType
* The given mime type.
*
@@ -201,6 +217,8 @@
/**
* @brief Retrieve the indexing mode.
*
+ * This method may be called concurrently.
+ *
* @return The indexing mode which specifies which document are to be
* indexed.
*
@@ -231,6 +249,8 @@
* All documents must be stored together with their mime type. The mime type
* can be retrieved with the metadata key retrie- veable by this method.
*
+ * This method may be called concurrently.
+ *
* @return The meta data key for the mime type meta datum.
* @pre initialize() must be called first.
*/
Modified: trunk/java/ch/idok/dmsd/impl/config/svnlucene/Config.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/config/svnlucene/Config.java (original)
+++ trunk/java/ch/idok/dmsd/impl/config/svnlucene/Config.java Tue Jun 23 11:04:51 2009
@@ -39,6 +39,7 @@
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;
import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory;
@@ -165,7 +166,7 @@
}
/** @brief Mapping from existing repositories to pipeline parts. */
- HashMap<File, PipelinePart> pipelineParts;
+ ConcurrentHashMap<File, PipelinePart> pipelineParts;
/** @brief Constructor */
public Config() {
@@ -236,7 +237,7 @@
DAVRepositoryFactory.setup();
SVNRepositoryFactoryImpl.setup();
noOp = new NoOp();
- pipelineParts = new HashMap<File, PipelinePart>();
+ pipelineParts = new ConcurrentHashMap<File, PipelinePart>();
// ----------------------------------
repoRootDir = new File(args[3]).getCanonicalFile();
if (!repoRootDir.isDirectory())
@@ -310,6 +311,8 @@
/**
* @brief Retrieve the pipeline controller.
+ *
+ * This method must be thread safe!
*/
public PipelineController getPipelineController() {
return controller;
@@ -318,6 +321,8 @@
/**
* @brief Get a logger instance with the specified name.
*
+ * This method must be thread safe!
+ *
* @param subSystemName
* The name of the logger. subSystemName should start with
* "dmsd." in order to inherit logger properties specified on
@@ -330,6 +335,8 @@
/**
* @brief Retrieve the monitoring handler.
+ *
+ * This method must be thread safe!
*/
public MonitoringHandler getMonitoringManager() {
return controller;
@@ -344,6 +351,8 @@
/**
* @brief Retrieve the repository manager.
+ *
+ * This method must be thread safe!
*/
public RepositoryManager getRepositoryManger() {
return repositoryManager;
@@ -356,6 +365,8 @@
* directory of the repository. If the file is empty, it returns the version
* of the empty repository.
*
+ * This method must be thread safe!
+ *
* @param repoId
* The ID of the repository for which the last indexed
* version is to be retrieved.
@@ -370,9 +381,12 @@
File repo = new File(((SvnRepositoryId) repoId).getSvnUrl()
.getPath());
versionFile = new File(getAuxiliaryDir(repo), lastVersionFile);
- in = new BufferedReader(new InputStreamReader(new FileInputStream(
- versionFile)));
- String line = in.readLine();
+ String line;
+ synchronized(this) {
+ in = new BufferedReader(new InputStreamReader(new FileInputStream(
+ versionFile)));
+ line = in.readLine();
+ }
if (line == null) // EOF fallback solution
return repositoryManager.getRepository(repoId)
.getEmptyVersion();
@@ -399,7 +413,9 @@
/**
* @brief Set the last indexed version.
*
- * This routine will be called from the Indexer after accomplished work.
+ * This method will be called from the Indexer after accomplished work.
+ *
+ * This method must be thread safe!
*
* @param repoVersion
* The last indexed repository version.
@@ -415,11 +431,13 @@
.getPath());
versionFile = new File(getAuxiliaryDir(repo), lastVersionFile);
SvnVersion lastVersion = (SvnVersion) repoVersion;
- PrintStream out = new PrintStream(versionFile);
- try {
- out.println(lastVersion.toString());
- } finally {
- out.close();
+ synchronized (this) {
+ PrintStream out = new PrintStream(versionFile);
+ try {
+ out.println(lastVersion.toString());
+ } finally {
+ out.close();
+ }
}
} catch (ClassCastException ex) {
DmsException.throwIt(ErrorType.INTERNAL, this, "Bug detected",
@@ -444,13 +462,17 @@
*/
public void registerContentExtractor(String mimeType,
ContentExtractorFactory extractor) {
- extractors.put(mimeType, extractor);
+ synchronized(extractors) {
+ extractors.put(mimeType, extractor);
+ }
logger.fine("Registered content extractor for " + mimeType);
}
/**
* @brief Retrieve content extractor.
*
+ * This method must be thread safe!
+ *
* @param mimeType
* The mime type that the extractor should be able to handle.
* @param rawContent
@@ -462,7 +484,10 @@
public ContentExtractor getContentExtractor(String mimeType,
byte[] rawContent, Map<String, String> metaData)
throws DmsException {
- ContentExtractorFactory cef = extractors.get(mimeType);
+ ContentExtractorFactory cef;
+ synchronized(extractors) {
+ cef = extractors.get(mimeType);
+ }
if (cef == null)
return null;
return cef.getContentExtractor(rawContent, metaData);
@@ -471,16 +496,22 @@
/**
* @brief Check if content extractor exists for a given mime type.
*
+ * This method must be thread safe!
+ *
* @param mimeType
* The given mime type.
*/
public boolean contentExtractorExistsFor(String mimeType) {
- return extractors.get(mimeType) != null;
+ synchronized(extractors) {
+ return extractors.get(mimeType) != null;
+ }
}
/**
* @brief Retrieve the indexing mode.
*
+ * This method must be thread safe!
+ *
* @see dmsd.config.getIndexingMode()
*/
public IndexingMode getIndexingMode() {
@@ -496,7 +527,7 @@
* @throws DmsException
* If connectNoOp() fails.
*/
- void removePipeline(File repo) throws DmsException {
+ private void removePipeline(File repo) throws DmsException {
if (!pipelineParts.containsKey(repo))
return;
// TODO Make the next steps atomic
@@ -522,7 +553,7 @@
* @throws DmsException
* If some pipeline stage cannot be created.
*/
- PipelinePart createPipelinePart(RepositoryId repositoryId, File indexFile)
+ private PipelinePart createPipelinePart(RepositoryId repositoryId, File indexFile)
throws DmsException {
SimpleUpdateListGenerator updateListGenerator = new SimpleUpdateListGenerator(
repositoryId);
@@ -558,7 +589,7 @@
* @param newPart
* The new pipeline part.
*/
- void addPipeline(File repo, PipelinePart newPart) {
+ private void addPipeline(File repo, PipelinePart newPart) {
// TODO Make the next steps atomic
pipelineParts.put(repo, newPart);
tailQueue.connectHead(newPart.tail);
@@ -801,6 +832,8 @@
* 2) Otherwise the auxiliary directory will be constructed according to the
* convention Auxiliary(/X/Y/Z) := repoRootDir/Y/.Z
*
+ * This method must be thread safe!
+ *
* @param repo
* The canonical file system path for the repository.
* @return The auxiliary directory for the given repository.
@@ -811,9 +844,11 @@
if (repoName.charAt(0) != '.')
repoName = "." + repoName;
File auxParent = new File(auxDir, repoParent);
- if (!auxParent.isDirectory()) {
- logger.finest("Creating directory " + auxParent.getName());
- auxParent.mkdir();
+ synchronized(this) {
+ if (!auxParent.isDirectory()) {
+ logger.finest("Creating directory " + auxParent.getName());
+ auxParent.mkdir();
+ }
}
File auxFile = new File(auxParent, repoName);
logger.finest("Auxiliary directory for " + repo + " is " + auxFile);
@@ -826,6 +861,8 @@
* All documents must be stored together with their mime type. The mime type
* can be retrieved with the metadata key retrie- veable by this method.
*
+ * This method must be thread safe!
+ *
* @return The meta data key for the mime type meta datum.
* @pre initialize() must be called first.
*/
Modified: trunk/java/ch/idok/dmsd/impl/controller/SimpleController.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/controller/SimpleController.java (original)
+++ trunk/java/ch/idok/dmsd/impl/controller/SimpleController.java Tue Jun 23 11:04:51 2009
@@ -28,6 +28,10 @@
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -95,7 +99,7 @@
*
* This variable is set to 0 by requestFastCycle()
*/
- private int waitCycles;
+ volatile private int waitCycles;
/**
* @brief Local monitoring data.
@@ -108,6 +112,16 @@
private StringBuilder monitorValues = null;
/**
+ * @brief Number of active events
+ */
+ AtomicInteger activeEvents;
+
+ /**
+ * @brief Event Loop Executor Service
+ */
+ ExecutorService executor;
+
+ /**
* @brief Mime key for the original mime type.
*
* When this controller encouters an unknown mime type, the original mime
@@ -122,6 +136,43 @@
*/
HashMap<String, Double> accum = new HashMap<String, Double>();
+ /**
+ * @brief Event Handler
+ */
+ private class EventHandler implements Runnable {
+ /**
+ * @brief The event to be handled
+ */
+ private Event event;
+
+ /**
+ * @brief Handle event
+ */
+ @Override
+ public void run() {
+ int aev;
+ String tname = Thread.currentThread().getName();
+ try {
+ event.handle();
+ event.dispose();
+ } catch (Throwable th) {
+ logger.warning("Event Handler "+tname+"\n"+
+ "Exception during event handling "+th+"\n"+Util.stackTraceToString(th));
+ } finally {
+ aev = activeEvents.decrementAndGet();
+ }
+ logger.finest("Event Handler "+tname+" - active events: "+aev);
+ }
+
+ /**
+ * @brief Constructor
+ * @param ev The event to be handled
+ */
+ public EventHandler(Event ev) {
+ event = ev;
+ }
+ }
+
/** Constructor */
public SimpleController() {
super("Simple Pipeline Controller");
@@ -130,6 +181,8 @@
monitor.addCounter("operation.indexed.documents", 0);
monitor.addCounter("operation.exceptions", 0);
new SimpleControllerManager(this);
+ activeEvents = new AtomicInteger(0);
+ executor = Executors.newFixedThreadPool(32);
}
// -------------- Error Handling ------------------------
@@ -153,18 +206,25 @@
/**
* @brief Pipeline error handling.
*
- * This method should not throw any exceptions of its own! - Check the data
- * item for its integrity. - Check for recoverable errors: unknown or
- * missing mime type. Send the data item back to the pipeline with the mime
- * type set to "default". - Check for unrecoverable indexing errors: No
- * index access. Signal DOWNSTREAM_PROBLEM to stop indexing for the
- * repository. - Internal errors: print a stack trace. - Drop the document.
+ * This method should not throw any exceptions of its own!
+ *
+ * - Check the data item for its integrity.
+ * - Check for recoverable errors:
+ * unknown or missing mime type. Send the data item back to the pipeline
+ * with the mime type set to "default".
+ * - Check for unrecoverable indexing errors:
+ * No index access. Signal DOWNSTREAM_PROBLEM to stop indexing
+ * for the repository.
+ * - Internal errors: print a stack trace.
+ * - Drop the document.
+ *
+ * This method must be thread safe!
*
* @param data
* The pipeline data item that caused an error.
*/
@Override
- public void handlePipelineError(PipelineData data) {
+ synchronized public void handlePipelineError(PipelineData data) {
monitor.incCounter("operation.exceptions", 1);
try {
@@ -266,11 +326,13 @@
* signal to the problematic stage and upwards. The disabling logic is
* located in the ch.idok.dmsd.impl.updatelist.SimpleUpdateListGenerator.
*
+ * This method must be thread safe!
+ *
* @param ex
* The description object for the pipeline signal exception.
*/
@Override
- public void handleSignalingError(PipelineSignalException ex) {
+ synchronized public void handleSignalingError(PipelineSignalException ex) {
try {
Level level = Level.WARNING;
StringBuffer sb = new StringBuffer();
@@ -313,11 +375,13 @@
* This method should only be called from a pipeline queue or the error
* handler of the pipeline controller.
*
+ * This method must be thread safe!
+ *
* @param data
* The pipeline data item.
*/
@Override
- public void push(PipelineData data) {
+ synchronized public void push(PipelineData data) {
logger.fine("Indexed " + data.repositoryChange.path.getName());
monitor.incCounter("operation.indexed.documents", 1);
data.dispose();
@@ -345,13 +409,15 @@
* queue. When all parts have finished, the controller is informed, with the
* last finished pipeline stage as the originator.
*
+ * This method must be thread safe!
+ *
* @param signal
* The pipeline signal.
* @param originator
* The originator of the pipeline signal.
*/
@Override
- public void processSignal(PipelineSignal signal)
+ synchronized public void processSignal(PipelineSignal signal)
throws DmsException {
logger.fine("Received signal " + signal + " from " +
signal.originator);
if (signal.type == Signal.FINISHED) {
@@ -372,22 +438,33 @@
* @param reactToStop React to external stop requests
*/
void eventLoop(boolean reactToStop) {
- Event event = EventQueue.dequeue();
- while (event != null) {
- if (reactToStop && stopnow) {
- EventQueue.flush();
- break;
- }
- try {
- event.handle();
- event.dispose();
- event = EventQueue.dequeue();
- } catch (Throwable th) {
- StringBuffer msg = new StringBuffer("Exception in event loop:\n");
- logger.warning(Util.getStackTrace(msg, th).toString());
+ Thread me = Thread.currentThread();
+ me.setPriority(Thread.NORM_PRIORITY - 1);
+ int aev;
+ do {
+ aev = activeEvents.get();
+ Event event = EventQueue.dequeue();
+ while (event != null) {
+ if (reactToStop && stopnow) {
+ EventQueue.flush();
+ try {
+ executor.awaitTermination(60, TimeUnit.SECONDS);
+ } catch (InterruptedException ex) { /*ignore*/ }
+ break;
+ }
+ try {
+ activeEvents.incrementAndGet();
+ executor.execute(new EventHandler(event));
+ aev = activeEvents.get();
+ event = EventQueue.dequeue();
+ } catch (Throwable th) {
+ StringBuffer msg = new StringBuffer("Exception in event loop:\n");
+ logger.warning(Util.getStackTrace(msg, th).toString());
+ }
}
Thread.yield();
- }
+ } while(aev > 0);
+ me.setPriority(Thread.NORM_PRIORITY);
}
/**
@@ -493,6 +570,8 @@
/**
* @brief Start the next indexing cycle without any delay.
+ *
+ * This method must be thread safe!
*/
@Override
public void requestFastCycle() {
@@ -503,11 +582,12 @@
* @brief Display monitoring data.
*
* Write all counters and timers to the log.
+ * This method must be thread safe!
*
* @param data
* The monitoring data.
*/
- public void handleMonitoringData(MonitoringData data) {
+ synchronized public void handleMonitoringData(MonitoringData data) {
double value;
if (monitorValues == null)
monitorValues = new StringBuilder("\n");
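
The rewritten eventLoop above no longer handles events inline: each event is wrapped in an EventHandler and submitted to a fixed thread pool, an AtomicInteger counts events still in flight, and the outer loop only ends once the queue is drained and that counter reaches zero. A standalone sketch of the same dispatch pattern (illustrative only; the names and the pool size are hypothetical, not the iDok classes):

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    public class EventLoopSketch {

        private final Queue<Runnable> queue = new ConcurrentLinkedQueue<Runnable>();
        private final ExecutorService pool = Executors.newFixedThreadPool(4);
        private final AtomicInteger active = new AtomicInteger(0);

        public void submit(Runnable task) {
            queue.add(task);
        }

        public void run() {
            int inFlight;
            do {
                inFlight = active.get();
                Runnable task = queue.poll();
                while (task != null) {
                    active.incrementAndGet();
                    final Runnable body = task;
                    pool.execute(new Runnable() {
                        public void run() {
                            try {
                                body.run();               // handle one event on a worker thread
                            } finally {
                                active.decrementAndGet(); // counted as done even if it threw
                            }
                        }
                    });
                    inFlight = active.get();
                    task = queue.poll();
                }
                Thread.yield();  // let workers finish or enqueue follow-up events
            } while (inFlight > 0 || !queue.isEmpty());
            pool.shutdown();
        }
    }

Tracking in-flight work separately from the queue matters because a running handler may enqueue follow-up events after the queue has momentarily looked empty.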
Modified: trunk/java/ch/idok/dmsd/impl/extractor/Extractor.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/extractor/Extractor.java (original)
+++ trunk/java/ch/idok/dmsd/impl/extractor/Extractor.java Tue Jun 23 11:04:51 2009
@@ -74,11 +74,13 @@
* content extractor for this mime type. The text content is then extracted
* and the meta data modified.
*
+ * This method must be thread safe!
+ *
* @param data
* The pipeline data item to be processed.
*/
@Override
- public void push(PipelineData data) {
+ synchronized public void push(PipelineData data) {
try {
data.stage = this;
monitor.resumeTimer("extractor.time");
@@ -153,11 +155,15 @@
}
/**
- * @brief Handle pipeline signals. - GET_COUNTERS: Call the monitoring
+ * @brief Handle pipeline signals.
+ *
+ * - GET_COUNTERS: Call the monitoring
* manager with the local monitoring data.
+ *
+ * This method must be thread safe!
*/
@Override
- public void processSignal(PipelineSignal signal)
+ synchronized public void processSignal(PipelineSignal signal)
throws DmsException {
logger.finest("Received signal " + signal + " from " +
signal.originator);
if (signal.type == Signal.GET_COUNTERS)
Modified: trunk/java/ch/idok/dmsd/impl/extractor/microsoft/MSOfficeExtractorFactory.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/extractor/microsoft/MSOfficeExtractorFactory.java (original)
+++ trunk/java/ch/idok/dmsd/impl/extractor/microsoft/MSOfficeExtractorFactory.java Tue Jun 23 11:04:51 2009
@@ -122,6 +122,9 @@
*/
long streamReaderTimeout = interruptDelay;
+ /** @brief Object for serializing OO access */
+ static final Object serializer = new Object();
+
/**
* @brief Class implementing a thread for reading an error stream.
*/
@@ -211,6 +214,8 @@
/**
* @brief The content extractor class for microsoft office documents.
+ *
+ * This class must serialize access to OO!
*/
private final class Exctractor implements ContentExtractor {
@@ -274,7 +279,9 @@
boolean retry = false;
do {
try {
- res = tryGetText(tf.getCanonicalPath());
+ synchronized (serializer) {
+ res = tryGetText(tf.getCanonicalPath());
+ }
return res;
} catch (DmsException ex) {
if (retry)
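
The new static serializer object above exists because all extractor instances share one external office process, so only one conversion may run at a time. A tiny standalone sketch of that idea (illustrative only; the names are hypothetical, not the iDok classes):

    public class SerializedAccessSketch {

        // One lock shared by every instance: all conversions are funnelled through it.
        private static final Object CONVERTER_LOCK = new Object();

        public String convert(String path) throws Exception {
            synchronized (CONVERTER_LOCK) {
                return runExternalConverter(path);
            }
        }

        // Hypothetical stand-in for the call into the shared external converter process.
        private String runExternalConverter(String path) throws Exception {
            return "text extracted from " + path;
        }
    }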
Modified: trunk/java/ch/idok/dmsd/impl/indexer/lucene/Indexer.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/indexer/lucene/Indexer.java (original)
+++ trunk/java/ch/idok/dmsd/impl/indexer/lucene/Indexer.java Tue Jun 23 11:04:51 2009
@@ -180,11 +180,13 @@
* Indexable fields are all meta data keys and their values, and the
* "content" field with the extracted document text.
*
+ * This method must be thread safe!
+ *
* @param data
* The pipeline data item.
*/
@Override
- public void push(PipelineData data) {
+ synchronized public void push(PipelineData data) {
try {
data.stage = this;
if (operationStopped)
@@ -310,9 +312,13 @@
}
/**
- * @brief Process pipline signals. - START: Open/Create the Lucene index. -
- * STOP: Close the index and optimize it. - GET_COUNTERS: Send the
- * local monitoring data to the monitoring manager.
+ * @brief Process pipline signals.
+ *
+ * - START: Open/Create the Lucene index.
+ * - STOP: Close the index and optimize it.
+ * - GET_COUNTERS: Send the local monitoring data to the monitoring manager.
+ *
+ * This method must be thread safe!
*
* @param signal
* The pipline signal.
@@ -320,7 +326,7 @@
* The originator of the pipline signal.
*/
@Override
- public void processSignal(PipelineSignal signal)
+ synchronized public void processSignal(PipelineSignal signal)
throws DmsException {
try {
logger.finest("Received signal " + signal + " from " +
signal.originator);
Modified: trunk/java/ch/idok/dmsd/impl/loader/ContentLoader.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/loader/ContentLoader.java (original)
+++ trunk/java/ch/idok/dmsd/impl/loader/ContentLoader.java Tue Jun 23 11:04:51 2009
@@ -86,11 +86,13 @@
* The data.documentContent and data.documentMetadata fields of the pipeline
* data item must be filled out by this method.
*
+ * This method must be thread safe!
+ *
* @param data
* The pipeline data item.
*/
@Override
- public void push(PipelineData data) {
+ synchronized public void push(PipelineData data) {
try {
data.stage = this;
monitor.resumeTimer("loader.time");
@@ -196,13 +198,15 @@
* Process pipeline signals. - GET_COUNTERS: Send the local monitoring data
* to the monitoring manager.
*
+ * This method must be thread safe!
+ *
* @param signal
* The pipline signal.
* @param originator
* The originator of the signal.
*/
@Override
- public void processSignal(PipelineSignal signal)
+ synchronized public void processSignal(PipelineSignal signal)
throws DmsException {
logger.finest("Received signal " + signal + " from " +
signal.originator);
if (signal.type == Signal.GET_COUNTERS)
Modified: trunk/java/ch/idok/dmsd/impl/queue/DataEvent.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/queue/DataEvent.java (original)
+++ trunk/java/ch/idok/dmsd/impl/queue/DataEvent.java Tue Jun 23 11:04:51 2009
@@ -35,8 +35,8 @@
* @brief Constructor
* @param item The pipeline data item
*/
- private DataEvent(PipelineData item) {
- super(DATA);
+ private DataEvent(PipelineData item, int priority) {
+ super(DATA, priority);
data = item;
}
@@ -44,7 +44,7 @@
* @brief Create a signaling event object
*/
public static DataEvent getInstance(PipelineData data) {
- return new DataEvent(data);
+ return new DataEvent(data, EventPriority.data);
}
/**
Modified: trunk/java/ch/idok/dmsd/impl/queue/Event.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/queue/Event.java (original)
+++ trunk/java/ch/idok/dmsd/impl/queue/Event.java Tue Jun 23 11:04:51 2009
@@ -24,7 +24,7 @@
/**
* @brief Generic event
*/
-public abstract class Event {
+public abstract class Event implements Comparable<Event> {
/** @brief DATA event type */
public static final int DATA = 0;
/** @brief SIGNAL event type */
@@ -33,6 +33,9 @@
/** @brief Event type of this event */
public int type;
+ /** @brief Event priority */
+ public int priority;
+
/**
* @brief Handle this event
* @throws DmsException If the event cannot be handled properly
@@ -52,10 +55,24 @@
public abstract void clear();
/**
+ * @brief Comparator method
+ *
+ * @param o The other event
+ * @return
+ * - less than 0 if this has higher priority than o
+ * - 0 if this is the same priority as o
+ * - more than 0 if this has lower priority than o
+ */
+ public int compareTo(Event o) {
+ return priority - o.priority;
+ }
+
+ /**
* @brief Constructor
* @param kind Which event type is this?
*/
- public Event(int kind) {
+ public Event(int kind, int prio) {
type = kind;
+ priority = prio;
}
}
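
Making Event comparable is what lets the EventQueue (next file) switch to a PriorityQueue: with compareTo returning priority - o.priority, the event with the numerically smallest priority value is polled first. A standalone sketch of that ordering (illustrative only; the names and numeric values are hypothetical, since the actual constants live in the newly added EventPriority.java, which is not shown in this diff):

    import java.util.PriorityQueue;

    public class PrioritySketch {

        static class Item implements Comparable<Item> {
            final int priority;
            final String name;

            Item(int priority, String name) {
                this.priority = priority;
                this.name = name;
            }

            public int compareTo(Item other) {
                return priority - other.priority; // smaller value = handled earlier
            }
        }

        public static void main(String[] args) {
            PriorityQueue<Item> queue = new PriorityQueue<Item>();
            queue.add(new Item(5, "bulk data"));
            queue.add(new Item(0, "urgent signal"));
            queue.add(new Item(2, "start signal"));
            while (!queue.isEmpty())
                System.out.println(queue.poll().name); // urgent signal, start signal, bulk data
        }
    }

Plain integer subtraction is safe here as long as the priorities are small non-negative constants; it would overflow for values near the int limits.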
Modified: trunk/java/ch/idok/dmsd/impl/queue/EventQueue.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/queue/EventQueue.java (original)
+++ trunk/java/ch/idok/dmsd/impl/queue/EventQueue.java Tue Jun 23 11:04:51 2009
@@ -19,7 +19,8 @@
package ch.idok.dmsd.impl.queue;
-import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.AbstractQueue;
+import java.util.PriorityQueue;
import java.util.logging.Logger;
import ch.idok.dmsd.config.Setup;
@@ -42,31 +43,45 @@
* pipeline stages count on in order handling of
* events.
*/
- private ConcurrentLinkedQueue<Event> eventQueue;
+ private PriorityQueue<Event> eventQueue;
/**
* @brief Hidden constructor, this is a singleton
*/
private EventQueue() {
- eventQueue = new ConcurrentLinkedQueue<Event>();
+ eventQueue = new PriorityQueue<Event>();
}
/**
* @brief Enqueue an event
+ *
+ * May be called concurrently.
+ *
* @param event The event to be enqueued
*/
public static void enqueue(Event event) {
- instance().eventQueue.add(event);
+ EventQueue inst = instance();
logger.finest("Enqueueing event: "+event);
+ synchronized(inst) {
+ inst.eventQueue.add(event);
+ }
}
/**
* @brief Retrieve and remove an event from the queue
+ *
+ * May be called concurrently.
+ *
* @return An event on the queue
*/
public static Event dequeue() {
- Event event = instance().eventQueue.poll();
- logger.finest("Dequeueing event: "+event);
+ EventQueue inst = instance();
+ Event event;
+ synchronized(inst) {
+ event = inst.eventQueue.poll();
+ }
+ if (event != null)
+ logger.finest("Dequeueing event: "+event);
return event;
}
@@ -76,9 +91,9 @@
* @return The number of removed events
*/
public static int flush() {
- EventQueue inst = instance();
- int rval = inst.eventQueue.size();
- inst.eventQueue.clear();
+ AbstractQueue<Event> eq = eventQueue();
+ int rval = eq.size();
+ eq.clear();
logger.finest("Flushing " + rval + " events off the queue");
return rval;
}
@@ -91,5 +106,9 @@
return singleton;
}
+ private static AbstractQueue<Event> eventQueue() {
+ return instance().eventQueue;
+ }
+
private static EventQueue singleton;
}
Modified: trunk/java/ch/idok/dmsd/impl/queue/SignalEvent.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/queue/SignalEvent.java (original)
+++ trunk/java/ch/idok/dmsd/impl/queue/SignalEvent.java Tue Jun 23 11:04:51 2009
@@ -23,6 +23,7 @@
import ch.idok.common.errorhandling.ErrorType;
import ch.idok.dmsd.pipeline.PipelineSignal;
import ch.idok.dmsd.pipeline.PipelineStage;
+import ch.idok.dmsd.pipeline.Signal;
/**
* @brief Signaling event
@@ -47,8 +48,8 @@
/**
* @brief Constructor
*/
- private SignalEvent(PipelineStage dest, PipelineSignal sig) {
- super(SIGNAL);
+ private SignalEvent(PipelineStage dest, PipelineSignal sig, int priority) {
+ super(SIGNAL, priority);
destination = dest;
signal = sig;
}
@@ -64,7 +65,26 @@
if (sig == null)
DmsException.throwIt(ErrorType.INTERNAL, id,
"Bug detected", "SignalEvent.getInstance called with invalid signal argument!");
- return new SignalEvent(dest, sig);
+ int prio = EventPriority.data;
+ if (sig.type == Signal.UPSTREAM_PROBLEM)
+ prio = EventPriority.urgent;
+ else if (sig.type == Signal.DOWNSTREAM_PROBLEM)
+ prio = EventPriority.urgent;
+ else if (sig.type == Signal.START)
+ prio = EventPriority.handleBeforeData;
+ else if (sig.type == Signal.COMMIT)
+ prio = EventPriority.handleAfterData1;
+ else if (sig.type == Signal.FINISHED)
+ prio = EventPriority.handleAfterData2;
+ else if (sig.type == Signal.GET_COUNTERS)
+ prio = EventPriority.handleAfterData3;
+ else if (sig.type == Signal.STOP)
+ prio = EventPriority.handleAfterData3;
+ else
+ DmsException.throwIt(ErrorType.INTERNAL, id,
+ "Bug detected", "Unknown signal " + sig.type);
+
+ return new SignalEvent(dest, sig, prio);
}
/**
Modified: trunk/java/ch/idok/dmsd/impl/updatelist/SimpleUpdateListGenerator.java
==============================================================================
--- trunk/java/ch/idok/dmsd/impl/updatelist/SimpleUpdateListGenerator.java (original)
+++ trunk/java/ch/idok/dmsd/impl/updatelist/SimpleUpdateListGenerator.java Tue Jun 23 11:04:51 2009
@@ -142,11 +142,13 @@
* they have all been processed, the signal FINISHED is sent to the pipeline
* controller.
*
+ * This method must be thread safe!
+ *
* @param data
* The pipeline data item.
*/
@Override
- public void push(PipelineData data) {
+ synchronized public void push(PipelineData data) {
try {
data.stage = this;
if (repository == null)
@@ -203,16 +205,18 @@
* is not able to START or index properly - emergency STOP. - GET_COUNTERS:
* Send the local monitoring data to the monitoring manager.
*
- * In case of an emrgency STOP, the update list will be treated as if empty,
+ * In case of an emergency STOP, the update list will be treated as if empty,
* and the last indexed version will not be set (to an increased value).
*
+ * This method must be thread safe!
+ *
* @param signal
* The pipeline signal.
* @param originator
* The originator of the pipeline signal.
*/
@Override
- public void processSignal(PipelineSignal signal)
+ synchronized public void processSignal(PipelineSignal signal)
throws DmsException {
DmsException exception;
try {
Modified: trunk/java/ch/idok/dmsd/indexer/ContentExtractor.java
==============================================================================
--- trunk/java/ch/idok/dmsd/indexer/ContentExtractor.java (original)
+++ trunk/java/ch/idok/dmsd/indexer/ContentExtractor.java Tue Jun 23 11:04:51 2009
@@ -60,7 +60,7 @@
/**
* @brief Retriev the document meta data.
*
- * The content extractor might add new meta data.
+ * The content extractor might add new meta data.
*
* @return The (modified) document meta data.
* @throws DmsException
Modified: trunk/java/ch/idok/dmsd/indexer/ContentExtractorFactory.java
==============================================================================
--- trunk/java/ch/idok/dmsd/indexer/ContentExtractorFactory.java (original)
+++ trunk/java/ch/idok/dmsd/indexer/ContentExtractorFactory.java Tue Jun 23 11:04:51 2009
@@ -37,6 +37,8 @@
/**
* @brief Retrieve the mime type which can be handled by the objects that
* this factory creates.
+ *
+ * May be called concurrently.
*
* @return The mime type which can be handled by objects that this factory
* creates.
@@ -64,6 +66,8 @@
/**
* @brief Create a new content extractor instance.
*
+ * May be called concurrently.
+ *
* @param rawContent
* The raw document content that should be handled by the
* created extractor.
Modified: trunk/java/ch/idok/dmsd/monitoring/MonitoringHandler.java
==============================================================================
--- trunk/java/ch/idok/dmsd/monitoring/MonitoringHandler.java (original)
+++ trunk/java/ch/idok/dmsd/monitoring/MonitoringHandler.java Tue Jun 23 11:04:51 2009
@@ -30,6 +30,8 @@
/**
* @brief Handle monitoring data.
*
+ * May be called concurrently.
+ *
* @param data
* The collected monitoring data.
*/
Modified: trunk/java/ch/idok/dmsd/pipeline/PipelineController.java
==============================================================================
--- trunk/java/ch/idok/dmsd/pipeline/PipelineController.java (original)
+++ trunk/java/ch/idok/dmsd/pipeline/PipelineController.java Tue Jun 23 11:04:51 2009
@@ -44,6 +44,13 @@
super(name);
}
+ /**
+ * @brief Handle signal.
+ *
+ * This may be called concurrently.
+ *
+ * @param signal The signal to be handled.
+ */
@Override
public void signal(PipelineSignal signal)
throws DmsException {
@@ -66,6 +73,7 @@
* appropriately.
*
* This method must not throw any exceptions of its own!
+ * This method may be called concurrently.
*
* @param data
* The pipeline data item which caused the trouble.
@@ -83,6 +91,7 @@
* way, that enables index maintainers to react appropriately.
*
* This method must not throw any exceptions of its own!
+ * This method may be called concurrently.
*
* @param ex
* The description object for the pipeline signal exception.
@@ -94,6 +103,8 @@
*
* Normally ther will be a break between indexing cycles. Calling this
* function will eliminate this delay.
+ *
+ * This method may be called concurrently.
*/
public abstract void requestFastCycle();
}
Modified: trunk/java/ch/idok/dmsd/pipeline/PipelineQueue.java
==============================================================================
--- trunk/java/ch/idok/dmsd/pipeline/PipelineQueue.java (original)
+++ trunk/java/ch/idok/dmsd/pipeline/PipelineQueue.java Tue Jun 23 11:04:51 2009
@@ -54,6 +54,8 @@
/**
* @brief Receive a pipeline data item.
*
+ * This method may be called concurrently.
+ *
* @param stage
* The pipeline stage from which the data item is received.
* @param data
@@ -145,6 +147,8 @@
* signal.direction is UP, push the signal upstream. - If signal.direction
* is DOWN, push the signal downstream. - Otherwise drop the signal.
*
+ * This method may be called concurrently.
+ *
* @param signal
* The pipeline signal to be distributed.
* @param originator
Modified: trunk/java/ch/idok/dmsd/pipeline/PipelineStage.java
==============================================================================
--- trunk/java/ch/idok/dmsd/pipeline/PipelineStage.java (original)
+++ trunk/java/ch/idok/dmsd/pipeline/PipelineStage.java Tue Jun 23 11:04:51 2009
@@ -143,6 +143,8 @@
* signals are sent to the incoming queue. - Nonpropagating signals are
* droped.
*
+ * This method may be called concurrently.
+ *
* @param signal
* The pipeline signal to handle.
* @param originator
@@ -188,6 +190,8 @@
* This method should only be called by the incomming pipeline queue or the
* error handler of the pipeline controller.
*
+ * This method may be called concurrently.
+ *
* @param data
* The pipeline data item to be processed by this stage.
*/
@@ -207,6 +211,8 @@
/**
* @brief Process pipeline signals.
*
+ * This method must be implemented in a thread safe way!
+ *
* @param signal
* The pipeline signal to handle.
* @param originator
Modified: trunk/java/ch/idok/service/server/search/lucene/LuceneSearchService.java
==============================================================================
--- trunk/java/ch/idok/service/server/search/lucene/LuceneSearchService.java (original)
+++ trunk/java/ch/idok/service/server/search/lucene/LuceneSearchService.java Tue Jun 23 11:04:51 2009
@@ -51,6 +51,7 @@
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.TokenGroup;
+import org.apache.lucene.store.FSDirectory;
import ch.idok.common.config.Setup;
import ch.idok.common.errorhandling.DmsException;
@@ -605,8 +606,10 @@
throws DmsException {
IndexReader indexReader = null;
try {
- String indexStr = getIndexString(repository);
- indexReader = IndexReader.open(indexStr);
+ File indexDir = new File(getIndexString(repository));
+ if (! indexDir.exists())
+ DmsException.throwIt(ErrorType.INDEX_ACCESS, this, "Index for "+repository+" doesn't exist", "Index directory doesn't exist");
+ indexReader = IndexReader.open(FSDirectory.getDirectory(indexDir), true);
Collection<Object> fields = indexReader
.getFieldNames(FieldOption.ALL);
String[] res = new String[fields.size()];
@@ -615,6 +618,8 @@
res[fieldIdx++] = (String) field;
}
return res;
+ } catch (DmsException ex) {
+ throw ex;
} catch (IOException ex) {
throw new DmsException(ErrorType.INDEX_ACCESS, this,
"Cannot open index for " + repository,
Modified: trunk/local-server.xml
==============================================================================
--- trunk/local-server.xml (original)
+++ trunk/local-server.xml Tue Jun 23 11:04:51 2009
@@ -26,6 +26,8 @@
<property name="xvfb.exec" value="Xvfb" />
<property name="indexer.heapsize.max" value="800m" />
+ <property name="indexer.soffice.path" value="soffice" />
+ <property name="indexer.soffice.interruptdelay" value="20000" />
<property name="indexer.ooffice.screen" value=":0" />
<property name="indexer.oouno.port" value="1088" />
<property name="indexer.mimetypes" value="misc/mime.types" />
@@ -33,7 +35,8 @@
<property name="indexer.auxiliary.dir" value="${server.index.root.dir}" />
<property name="indexer.loglevel" value="INFO" />
<property name="indexer.index.name" value="lucene-index" />
-
+ <property name="indexer.svnurl" value="file://${server.project.root.dir}" />
+
<fileset id="idok.libs.server" dir="">
<include name="ant_dist/idok.jar" />
<include name="lib/svnkit.jar" />
@@ -399,9 +402,13 @@
value="${indexer.restartinterval}" />
<sysproperty key="ch.idok.dmsd.impl.config.svnlucene.auxdir"
value="${server.index.root.dir}" />
+ <sysproperty key="ch.idok.dmsd.impl.extractor.microsoft.soffice"
+ value="${indexer.soffice.path}" />
+ <sysproperty key="ch.idok.dmsd.impl.extractor.microsoft.interruptdelay"
+ value="${indexer.soffice.interruptdelay" />
<arg line="daemon ${indexer.loglevel} ch.idok.dmsd.impl.config.svnlucene.Config" />
<arg file="${server.project.root.dir}" />
- <arg line="${indexer.index.name} last-indexed-version all file://${server.project.root.dir}" />
+ <arg line="${indexer.index.name} last-indexed-version all ${indexer.svnurl}" />
</java>
</target>