[idok-commit] idok commit r691 - in trunk/dmsd: . ch/psi/dms/dmsd/config ch/psi/dms/dmsd/impl/config/svnlucene dist
- From: "Apache" <apache AT savannah.psi.ch>
- To: idok-commit AT lists.psi.ch
- Subject: [idok-commit] idok commit r691 - in trunk/dmsd: . ch/psi/dms/dmsd/config ch/psi/dms/dmsd/impl/config/svnlucene dist
- Date: Wed, 30 Jan 2008 14:34:44 +0100
- List-archive: <https://lists.web.psi.ch/pipermail/idok-commit/>
- List-id: Commit emails of the iDok project <idok-commit.lists.psi.ch>
Author: stadler_h AT PSI.CH
Date: Wed Jan 30 14:34:43 2008
New Revision: 691
Log:
Tidying up the reconfiguration method and adding logic to remove stale
repositories. New pdfbox library dependencies added. Close stdin where
possible.
Modified:
trunk/dmsd/build.xml
trunk/dmsd/ch/psi/dms/dmsd/config/Util.java
trunk/dmsd/ch/psi/dms/dmsd/impl/config/svnlucene/Config.java
trunk/dmsd/dist/run-dmsd.sh
Modified: trunk/dmsd/build.xml
==============================================================================
--- trunk/dmsd/build.xml (original)
+++ trunk/dmsd/build.xml Wed Jan 30 14:34:43 2008
@@ -43,6 +43,9 @@
<property name="jsvn" value="${distlib}/svnkit.jar"/>
<property name="lucene" value="${distlib}/lucene-core.jar"/>
<property name="pdf" value="${distlib}/PDFBox.jar"/>
+ <property name="fontbox" value="${distlib}/FontBox.jar"/>
+ <property name="bcprov" value="${distlib}/bcprov.jar"/>
+ <property name="bcmail" value="${distlib}/bcmail.jar"/>
<property name="uno-1" value="${unodir}/unoil.jar"/>
<property name="uno-2" value="${unodir}/ridl.jar"/>
<property name="uno-3" value="${unodir}/juh.jar"/>
@@ -53,7 +56,7 @@
<!-- Library sets -->
<property name="uno-all" value="${uno-1}:${uno-2}:${uno-3}:${uno-4}"/>
- <property name="lib-all"
value="${libcommon}:${jsvn}:${lucene}:${pdf}:${uno-all}:${exif}:${tar}"/>
+ <property name="lib-all"
value="${libcommon}:${jsvn}:${lucene}:${pdf}:${fontbox}:${bcprov}:${bcmail}:${uno-all}:${exif}:${tar}"/>
<target name="init" description="Initialize timestamp properties and
create necessary directories">
<echo message="initializing"/>
Modified: trunk/dmsd/ch/psi/dms/dmsd/config/Util.java
==============================================================================
--- trunk/dmsd/ch/psi/dms/dmsd/config/Util.java (original)
+++ trunk/dmsd/ch/psi/dms/dmsd/config/Util.java Wed Jan 30 14:34:43 2008
@@ -16,6 +16,32 @@
private Util() {
}
+ /**
+ * @brief Filter interface for controlling directory scanning.
+ */
+ static public interface Filter {
+ /**
+ * @brief Check the directory or file
+ *
+ * @param file
+ * The file to be checked
+ * @param depth
+ * The remaining scanning depth
+ * @return
+ * true if the file should be scanned or included
+ */
+ boolean check(File file, int depth);
+ /**
+ * @brief Transform included files
+ *
+ * @param file
+ * The file to be included
+ * @return
+ * The transformed file
+ */
+ File transform(File file);
+ }
+
/**
* @brief Recursively scan a directory for subdirectories.
*
@@ -24,20 +50,22 @@
* @param depth
* The remaining scanning depth. If depth equals 0, root is
* added to subDirs.
+ * @param filter
+ * Filter for inclusion checking
* @param subDirs
* Add subdirectories at desired depth to this set.
*/
- private static void scanDir(File root, int depth, SortedSet<File> subDirs) {
- if (root.getName().charAt(0) == '.')
+ private static void scanDir(File root, int depth, Filter filter, SortedSet<File> subDirs) {
+ if (! filter.check(root, depth))
return;
if (depth == 0) {
- subDirs.add(root);
+ subDirs.add(filter.transform(root));
return;
}
for (File file : root.listFiles()) {
if (file.isDirectory())
- scanDir(file, depth - 1, subDirs);
+ scanDir(file, depth-1, filter, subDirs);
}
}
@@ -48,26 +76,28 @@
* The directory that is to be scanned for subdirectories.
* @param level
* Scan subdirectories at a depth of level. If level is less
- * or equal to one, the immediate subdirectories will be
- * returned.
- * @return The set of subdirectories of rootDir as a sorted set to
- * facilitate comparisons.
+ * or equal to zero, the rootDir itself will be returned.
+ * @param filter
+ * Filter for checking and transforming included files.
+ * @return
+ * The set of subdirectories of rootDir as a sorted set to
+ * facilitate comparisons.
* @throws DmsException
* On IO errors or if rootDir isn't a directory.
*/
public static final SortedSet<File> getSubdirectories(File rootDir,
- int level) throws DmsException {
+ int level, Filter filter) throws DmsException {
try {
if (!rootDir.isDirectory())
DmsException.throwIt(ErrorType.BAD_ARG, singleton,
"Expected a directory as an argument",
"The rootDir argument is not a directory");
- if (level < 1)
- level = 1;
+ if (level < 0)
+ level = 0;
TreeSet<File> subDirs = new TreeSet<File>();
- scanDir(rootDir, level, subDirs);
+ scanDir(rootDir, level, filter, subDirs);
return subDirs;
} catch (DmsException ex) {
throw ex;
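
For illustration, the new callback-style API can be driven as in the following minimal sketch. It reuses the "dot" policy that reconfig() below defines; the demo class itself is hypothetical, only Util, Util.Filter and getSubdirectories() come from the diff.

    import java.io.File;
    import java.util.SortedSet;
    import ch.psi.dms.dmsd.config.Util;

    public class FilterDemo {
        public static void main(String[] args) throws Exception {
            // Same policy the old scanDir() hard-wired: skip anything whose
            // name starts with a dot, and include matches unchanged.
            Util.Filter dot = new Util.Filter() {
                public boolean check(File file, int depth) {
                    return file.getName().charAt(0) != '.';
                }
                public File transform(File file) {
                    return file;
                }
            };
            // Repositories sit two levels below the root: <root>/project/repo
            SortedSet<File> repos =
                Util.getSubdirectories(new File(args[0]), 2, dot);
            for (File repo : repos)
                System.out.println(repo);
        }
    }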
Modified: trunk/dmsd/ch/psi/dms/dmsd/impl/config/svnlucene/Config.java
==============================================================================
--- trunk/dmsd/ch/psi/dms/dmsd/impl/config/svnlucene/Config.java (original)
+++ trunk/dmsd/ch/psi/dms/dmsd/impl/config/svnlucene/Config.java Wed Jan 30 14:34:43 2008
@@ -3,13 +3,14 @@
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.URI;
+import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
+import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Logger;
@@ -100,9 +101,9 @@
/** @brief Specifies which documents are to be indexed. */
private IndexingMode indexingMode;
- /** @brief Number of existing repositories. */
- private int numRepos;
-
+ /** @brief Root of the auxiliary files directory. Indexes are kept in <auxDir>/project/.repo/<indexDir> */
+ private File auxDir;
+
/** @brief The index directory name. */
private String indexDir;
@@ -121,18 +122,15 @@
*/
private NoOp noOp;
- /** @brief The existing set of repositories. */
- private TreeSet<File> repoDirs;
-
/** @brief The repository root directory. */
- private static File repoRootDir;
+ static File repoRootDir;
/** @brief The repository root URL. */
private String repoRootUrl;
/** @brief Container for holding a configurable pipeline part. */
private class PipelinePart {
- /** @brief The haed of the pipeline part. */
+ /** @brief The head of the pipeline part. */
public SimpleUpdateListGenerator head;
/** @brief The tail of the pipeline part. */
@@ -223,8 +221,6 @@
DAVRepositoryFactory.setup();
SVNRepositoryFactoryImpl.setup();
noOp = new NoOp();
- numRepos = 0;
- repoDirs = new TreeSet<File>();
pipelineParts = new HashMap<File, PipelinePart>();
// ----------------------------------
repoRootDir = new File(args[3]).getCanonicalFile();
@@ -232,6 +228,12 @@
DmsException
.throwIt(ErrorType.BAD_ARG, this, "Bad argument",
"Expected the path to a directory for the
repository root directory argument");
+ auxDir = new File (System.getProperty(
+ "ch.psi.dms.dmsd.impl.config.svnlucene.auxdir",
+ repoRootDir.getCanonicalPath()));
+ if (!auxDir.isDirectory())
+ DmsException.throwIt(ErrorType.BAD_ARG, this, "Bad argument",
+ "Expected the path to a directory for the
auxiliary files root directory argument");
indexDir = args[4];
lastVersionFile = args[5];
indexingMode = IndexingMode.parse(args[6]);
@@ -465,7 +467,79 @@
public IndexingMode getIndexingMode() {
return indexingMode;
}
-
+
+ /**
+ * @brief Remove the indexing pipeline for a repository.
+ *
+ * @param repo The repository for which the indexing pipeline should be removed.
+ * @throws DmsException If connectNoOp() fails.
+ */
+ void removePipeline(File repo) throws DmsException {
+ if (! pipelineParts.containsKey(repo))
+ return;
+ // TODO Make the next steps atomic
+ if (pipelineParts.size() == 1)
+ connectNoOp(); // Connect NoOp stage, since the pipeline should still
+ // be functional after having removed the last indexing
+ // pipeline part.
+ PipelinePart pipelinePart = pipelineParts.remove(repo);
+ headQueue.removeTail(pipelinePart.head);
+ tailQueue.removeHead(pipelinePart.tail);
+ }
+
+ /**
+ * @brief Create a new indexing pipeline part.
+ *
+ * @param repositoryId The id of the repository to be indexed by this pipeline part.
+ * @param indexFile The index directory file.
+ * @return The requested indexing pipeline part.
+ * @throws DmsException If some pipeline stage cannot be created.
+ */
+ PipelinePart createPipelinePart(RepositoryId repositoryId, File indexFile)
+ throws DmsException {
+ SimpleUpdateListGenerator updateListGenerator =
+ new SimpleUpdateListGenerator(repositoryId);
+ updateListGenerator.initialize();
+ logger.fine("Created list generator");
+ SimplePipelineQueue queue = new SimplePipelineQueue();
+ queue.connectHead(updateListGenerator);
+ ContentLoader loader = new ContentLoader(
+ "Content Loader for " + repositoryId);
+ loader.initialize();
+ logger.fine("Created content loader");
+ queue.connectTail(loader);
+ queue = new SimplePipelineQueue();
+ queue.connectHead(loader);
+ Extractor extractor = new Extractor(
+ "Content Extractor for " + repositoryId);
+ extractor.initialize();
+ logger.fine("Created extractor");
+ queue.connectTail(extractor);
+ queue = new SimplePipelineQueue();
+ queue.connectHead(extractor);
+ Indexer indexer = new Indexer(indexFile);
+ indexer.initialize();
+ logger.fine("Created indexer");
+ queue.connectTail(indexer);
+ return new PipelinePart(updateListGenerator, indexer, repositoryId);
+ }
+
+ /**
+ * @brief Add a pipeline part to the pipeline.
+ * @param repo The repository indexed by the new pipeline part.
+ * @param newPart The new pipeline part.
+ */
+ void addPipeline(File repo, PipelinePart newPart) {
+ // TODO Make the next steps atomic
+ pipelineParts.put(repo, newPart);
+ tailQueue.connectHead(newPart.tail);
+ headQueue.connectTail(newPart.head);
+ if (pipelineParts.size() != 0) {
+ removeNoOp(); // Remove the NoOp stage if there was no
+ // prior indexing pipeline.
+ }
+ }
+
/**
* @brief Reconfigure the pipeline if necessary.
*
@@ -475,214 +549,183 @@
*/
public void reconfig(boolean init) throws DmsException {
try {
+ Util.Filter dot = new Util.Filter() {
+ // Ignore files and directories starting with dot
+ public boolean check(File file, int depth) {
+ return file.getName().charAt(0) != '.';
+ }
+ public File transform(File file) {
+ return file;
+ }
+ };
+ Util.Filter nodot = new Util.Filter() {
+ // Require the lowest level to start with dot, remove the dot
+ public boolean check(File file, int depth) {
+ if (depth == 0)
+ return file.getName().charAt(0) == '.';
+ return file.getName().charAt(0) != '.';
+ }
+ public File transform(File file) {
+ File parent = new File(repoRootDir, file.getParentFile().getName());
+ return new File(parent, file.getName().substring(1));
+ }
+ };
logger.fine("Reconfiguring pipeline...");
- // The repositories are at level 2 from root directory <root>/project/repo
- SortedSet<File> curRepos = Util.getSubdirectories(repoRootDir, 2);
- TreeSet<File> newRepos = new TreeSet<File>(curRepos);
- TreeSet<File> staleRepos = new TreeSet<File>(repoDirs);
+ // The current repositories are at level 2 from root directory <root>/project/repo
+ // and the indexed repositories at level 2 from auxiliary directory <auxdir>/project/.repo
+ SortedSet<File> curRepos = Util.getSubdirectories(repoRootDir, 2, dot);
+ SortedSet<File> indexedRepos = Util.getSubdirectories(auxDir, 2, nodot);
+ TreeSet<File> newRepos = new TreeSet<File>();
+ TreeSet<File> staleRepos = new TreeSet<File>(indexedRepos);
+ Set<File> repoDirs = pipelineParts.keySet();
+ TreeSet<File> badRepos = new TreeSet<File>(repoDirs);
staleRepos.removeAll(curRepos);
- newRepos.removeAll(repoDirs);
- logger
- .finest("repoDirs: " + repoDirs + "\ncurRepos: " +
curRepos
- + "\nnewRepos: " + newRepos + "\nstaleRepos: "
- + staleRepos);
+ badRepos.removeAll(curRepos);
+ // curRepos = currently active repositories
+ // indexedRepos = repositories that have an index
+ // staleRepos = inactive repositories with an index -> remove index
+ // repoDirs = repositories for which an indexing pipeline exists
+ // badRepos = inactive repositories for which an indexing pipeline exists
+ logger.finest(
+ "repoDirs: " + repoDirs +
+ "\ncurRepos: " + curRepos +
+ "\nindexedRepos: " + indexedRepos +
+ "\nstaleRepos: " + staleRepos +
+ "\nbadRepos: " + badRepos);
+ repoDirs = null;
+ indexedRepos = null;
+
+ // Remove stale indices
+ for (File repo : staleRepos) {
+ logger.info("Deleting index " + repo);
+ Util.deleteDir(getAuxiliaryDir(repo));
+ }
+ staleRepos = null;
+
+ // Remove stale pipelines
+ for (File repo : badRepos) {
+ logger.info("Removing indexing for " + repo);
+ removePipeline(repo);
+ }
+ badRepos = null;
// Check/fix existing repositories
- curRepos.removeAll(newRepos);
for (File repo : curRepos) {
logger.fine("Checking repository " + repo);
File dmsdDir = getAuxiliaryDir(repo);
File versionFile = new File(dmsdDir, lastVersionFile);
File indexFile = new File(dmsdDir, indexDir);
if (!dmsdDir.isDirectory()) {
- logger
- .info("Index for "
- + repo
- + " needs to be fixed - no auxiliary
directory present");
+ logger.info("Index for " + repo
+ + " needs to be fixed - no
auxiliary directory present");
} else if (!versionFile.isFile()) {
logger.info("Index for " + repo
- + " needs to be fixed - no version file
present");
+ + " needs to be fixed - no version file present");
} else if (!indexFile.isDirectory()) {
- logger
- .info("Index for "
- + repo
- + " needs to be fixed - no index
directory present");
+ logger.info("Index for " + repo
+ + " needs to be fixed - no index directory present");
} else {
continue;
}
newRepos.add(repo);
}
- curRepos = null;
-
- // Remove stale repositories
- for (File repo : staleRepos) {
- // TODO make the next operations atomic
- // If the only remaining pipeline part is about to be removed,
- // connect the NoOp stage first
- logger.info("Removing repository " + repo);
- if (numRepos == 1)
- connectNoOp();
- PipelinePart pipelinePart = pipelineParts.remove(repo);
- assert (pipelinePart != null);
- headQueue.removeTail(pipelinePart.head);
- tailQueue.removeHead(pipelinePart.tail);
- --numRepos;
- logger.finest("Remaining repositories: " + numRepos);
- }
+ // newRepos = active repositories that have no good index
+ logger.finest("newRepos: " + newRepos);
- // Prepare new repositories
- // init: Just build the pipeline, don't destroy the repository
- for (File repo : newRepos) {
- StringBuffer sb = new StringBuffer();
+ // Prepare repositories
+ // curRepos = currently active repositories
+ // newRepos = active repositories that have no good index
+ for (File repo : curRepos) {
+ if (! newRepos.contains(repo) && pipelineParts.containsKey(repo))
+ continue;
+ // newRepos.contains(repo) OR NOT pipelineParts.containsKey(repo)
+ logger.info("Adding repository " + repo);
PipelinePart newPart = null;
+ RepositoryId repositoryId = null;
+ File versionFile = null;
+ File indexFile = null;
+ PrintStream out = null;
+ File dmsdDir = null;
+ String repoUrl = null;
try {
- logger.info("Adding repository " + repo);
- String filePath = repo.getCanonicalPath();
- filePath = new String(filePath.substring(repoRootDir
- .getCanonicalPath().length()));
- String repoUrl = repoRootUrl + filePath; // TODO Make
- // this platform
- // independent
- // "\" must be
- // converted to
- // "/", etc.
- logger.finest(repo + " has URL " + repoUrl);
- RepositoryId repositoryId = repositoryManager
- .getRepositoryId(new URI(repoUrl));
- File dmsdDir = getAuxiliaryDir(repo);
- File versionFile = null;
- File indexFile = null;
try {
- if (!dmsdDir.isDirectory() && !dmsdDir.mkdir())
- DmsException.throwIt(ErrorType.FILE_ACCESS, this,
- "Cannot create dmsd directory",
- "mkdir() failed");
- logger.fine("Dmsd directory: " + dmsdDir);
+ String filePath = repo.getCanonicalPath();
+ filePath = new String(filePath.substring(repoRootDir
+ .getCanonicalPath().length()));
+ repoUrl = repoRootUrl + filePath; // TODO Make
+ // this platform
+ // independent
+ // "\" must be
+ // converted to
+ // "/", etc.
+ logger.finest(repo + " has URL " + repoUrl);
+ repositoryId = repositoryManager.getRepositoryId(new URI(repoUrl));
+ } catch (IOException ex) {
+ logger.warning("getCanonicalPath() failed for " + repo);
+ throw ex;
+ } catch (URISyntaxException ex) {
+ logger.warning("URI syntax error in " + repoUrl);
+ throw ex;
+ }
+ dmsdDir = getAuxiliaryDir(repo);
+ if (!dmsdDir.isDirectory() && !dmsdDir.mkdir())
+ // create indexer directory for the repository if it doesn't exist
+ DmsException.throwIt(ErrorType.FILE_ACCESS, this,
+ "Cannot create dmsd directory",
+ "mkdir() failed");
+ logger.fine("Dmsd directory: " + dmsdDir);
+ indexFile = new File(dmsdDir, indexDir);
+ if (newRepos.contains(repo)) {
+ // prepare indexer directory
versionFile = new File(dmsdDir, lastVersionFile);
- indexFile = new File(dmsdDir, indexDir);
- if (init
- && (!versionFile.isFile() || !indexFile
- .isDirectory())) {
- if (!versionFile.isFile()
- && !versionFile.createNewFile())
- DmsException.throwIt(ErrorType.FILE_ACCESS,
- this,
- "Cannot create last version file",
- "createNewFile() failed");
- PrintStream out = new PrintStream(versionFile);
- try {
- out.println(repositoryManager.getRepository(
- repositoryId).getEmptyVersion()
- .toString());
- } finally {
- out.close();
- }
- if (indexFile.isDirectory()) {
- logger.finest("Deleting index file "
- + indexFile);
- Util.deleteDir(indexFile);
- }
+ try {
+ versionFile.createNewFile();
+ out = new PrintStream(versionFile);
+ } catch (IOException ex) {
+ DmsException.throwIt(ErrorType.FILE_ACCESS,
+ this,
+ "Cannot create last version file",
+ "createNewFile() failed");
+ }
+ out.println(repositoryManager.getRepository(repositoryId)
+ .getEmptyVersion().toString());
+ if (indexFile.isDirectory()) {
+ logger.finest("Deleting index file " + indexFile);
+ Util.deleteDir(indexFile);
}
logger.fine("Version file: " + versionFile.getPath());
logger.fine("Index file: " + indexFile.getPath());
- } catch (Throwable th) {
- logger.warning("Cannot create index for repository "
- + repo.getPath() + "\n" + th);
- continue;
}
- if (!repoDirs.contains(repo)) {
+ if (! pipelineParts.containsKey(repo)) {
+ // create indexing pipeline
logger.finest("Creating pipeline for " + repo);
- SimpleUpdateListGenerator updateListGenerator = new SimpleUpdateListGenerator(
- repositoryId);
- updateListGenerator.initialize();
- logger.fine("Created list generator");
- SimplePipelineQueue queue = new SimplePipelineQueue();
- queue.connectHead(updateListGenerator);
- ContentLoader loader = new ContentLoader(
- "Content Loader for " + repositoryId);
- loader.initialize();
- logger.fine("Created content loader");
- queue.connectTail(loader);
- queue = new SimplePipelineQueue();
- queue.connectHead(loader);
- Extractor extractor = new Extractor(
- "Content Extractor for " + repositoryId);
- extractor.initialize();
- logger.fine("Created extractor");
- queue.connectTail(extractor);
- queue = new SimplePipelineQueue();
- queue.connectHead(extractor);
- Indexer indexer = new Indexer(indexFile);
- indexer.initialize();
- logger.fine("Created indexer");
- queue.connectTail(indexer);
- newPart = new PipelinePart(updateListGenerator,
- indexer, repositoryId);
+ newPart = createPipelinePart(repositoryId, indexFile);
+ addPipeline(repo, newPart);
} else {
logger.info("Pipeline already exists for " + repo);
continue;
}
- } catch (DmsException ex) {
- sb
- .append(ex.toString() + ":\n" + ex.getLogMessage()
- + "\n");
- } catch (SecurityException ex) {
- sb
- .append("User: Cannot access config files\nDetailed: Security exception while accessing config files\n"
- + ex.getLocalizedMessage());
- } catch (FileNotFoundException ex) {
- sb
- .append("User: Invalid file path\nDetailed: Last version file path is invalid\n"
- + ex.getLocalizedMessage());
- } catch (IOException ex) {
- sb
- .append("User: Cannot access config files\nDetailed: Writing to version file failed\n"
- + ex.getLocalizedMessage());
} catch (Throwable th) {
- sb.append("User: Bug detected\nDetailed: Internal error\n"
- + th + "\n");
- ch.psi.dms.common.errorhandling.Util.getStackTrace(sb, th);
- }
- if (sb.length() > 0) {
- sb.insert(0, "Exception while adding repository " + repo
- + ":\n");
- logger.warning(sb.toString());
- repoDirs.remove(repo);
- } else {
- assert (newPart != null);
+ logger.warning("Cannot create index for repository "
+ + repo.getPath() + "\n" + th);
+ try { removePipeline(repo); } catch (Throwable t) {/*ignore*/}
try {
- pipelineParts.put(repo, newPart);
- tailQueue.connectHead(newPart.tail);
- headQueue.connectTail(newPart.head);
- } catch (Throwable th) {
- try {
- pipelineParts.remove(repo);
- } catch (Throwable t) {/* ignore */
- }
- try {
- tailQueue.removeHead(newPart.tail);
- } catch (Throwable t) {/* ignore */
- }
- try {
- headQueue.removeTail(newPart.head);
- } catch (Throwable t) {/* ignore */
- }
- throw th;
+ if (dmsdDir != null)
+ Util.deleteDir(dmsdDir);
+ } catch (Throwable t) {
+ logger.warning("Could not delete directory " +
dmsdDir);
}
- if (++numRepos == 1)
- removeNoOp(); // Remove the NoOp stage if there was no
- // repository to be indexed
- logger.finest("Number of repositories: " + numRepos);
+ continue;
+ } finally {
+ try { out.close(); } catch (Throwable th) { /*ignore*/ }
}
}
- } catch (DmsException ex) {
+ logger.finest("Pipelines have been established for " +
pipelineParts.keySet());
+ } catch (DmsException ex) { // global try
throw ex;
} catch (Throwable th) {
DmsException.throwIt(ErrorType.INTERNAL, this, "Bug detected",
"Reconfiguration failed", th);
- } finally {
- repoDirs.clear();
- repoDirs.addAll(pipelineParts.keySet());
}
}
@@ -715,11 +758,7 @@
* The canonical file system path for the repository.
* @return The auxiliary directory for the given repository.
*/
- private static final File getAuxiliaryDir(File repo) throws IOException {
- String auxDir = System.getProperty(
- "ch.psi.dms.dmsd.impl.config.svnlucene.auxdir", repoRootDir
- .getCanonicalPath());
-
+ private File getAuxiliaryDir(File repo) {
String repoParent = repo.getParentFile().getName();
String repoName = repo.getName();
if (repoName.charAt(0) != '.')
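
The reworked reconfig() reduces the repository bookkeeping to plain set arithmetic over three inputs. The following minimal self-contained sketch (with hypothetical repository names standing in for the File sets of the real code) shows how the stale and bad sets fall out:

    import java.util.Arrays;
    import java.util.Set;
    import java.util.TreeSet;

    public class ReconfigSets {
        public static void main(String[] args) {
            // Hypothetical stand-ins for the three inputs of reconfig():
            Set<String> curRepos =         // active repositories on disk
                new TreeSet<String>(Arrays.asList("p1/a", "p1/b", "p2/c"));
            Set<String> indexedRepos =     // repositories that have an index
                new TreeSet<String>(Arrays.asList("p1/a", "p1/old"));
            Set<String> repoDirs =         // repositories with a pipeline
                new TreeSet<String>(Arrays.asList("p1/a", "p2/gone"));

            // staleRepos = indexed but no longer active -> delete the index
            Set<String> staleRepos = new TreeSet<String>(indexedRepos);
            staleRepos.removeAll(curRepos);   // [p1/old]

            // badRepos = pipelined but no longer active -> remove the pipeline
            Set<String> badRepos = new TreeSet<String>(repoDirs);
            badRepos.removeAll(curRepos);     // [p2/gone]

            System.out.println("stale: " + staleRepos + ", bad: " + badRepos);
        }
    }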
Modified: trunk/dmsd/dist/run-dmsd.sh
==============================================================================
--- trunk/dmsd/dist/run-dmsd.sh (original)
+++ trunk/dmsd/dist/run-dmsd.sh Wed Jan 30 14:34:43 2008
@@ -72,7 +72,7 @@
${LVF_NAME} \
${INDEXING_MODE} \
${SVN_URL} \
- < /dev/null >> ${LOG_FILE} 2>&1
+ <&- >> ${LOG_FILE} 2>&1
(( (${?} != 1) || (${NUM_CYCLES_BEFORE_RESTART} < 0) )) && break
{ NEW_LOG_LEVEL=$(./loglevel-dmsd.sh get) && NEW_LOG_LEVEL=${NEW_LOG_LEVEL#logLevel=}; } ||
NEW_LOG_LEVEL=${DAEMON_LOG_LEVEL}