diff src/goodjava/lucene/logging/LoggingIndexWriter.java @ 1538:634f6765830e

use goodjava/lucene/logging
author Franklin Schmidt <fschmidt@gmail.com>
date Fri, 07 Aug 2020 21:42:16 -0600
parents 3bd4d7963456
children c27dc6af87ca
--- a/src/goodjava/lucene/logging/LoggingIndexWriter.java	Fri Aug 07 13:38:25 2020 -0600
+++ b/src/goodjava/lucene/logging/LoggingIndexWriter.java	Fri Aug 07 21:42:16 2020 -0600
@@ -13,6 +13,7 @@
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -50,7 +51,7 @@
 	protected final List<LogFile> logs = new ArrayList<LogFile>();
 	private LogOutputStream log;
 	private final File index;
-	private boolean isMerging = false;
+	private final SemaphoreLock mergeLock = new SemaphoreLock();
 
 	public LoggingIndexWriter(LuceneIndexWriter indexWriter,File logDir) throws IOException {
 		this.indexWriter = indexWriter;
@@ -92,18 +93,30 @@
 			log.close();
 		log = logs.get(logs.size()-1).output();
 	}
-
+/*
 	public synchronized boolean isMerging() {
-		return isMerging;
+		return mergeLock.isLocked();
 	}
-
-	private synchronized void isNotMerging() {
-		isMerging = false;
+*/
+	private void getMergeLock() {
+		try {
+			if( !mergeLock.tryLock(1,TimeUnit.MINUTES) )
+				throw new RuntimeException("failed to acquire lock");
+		} catch(InterruptedException e) {
+			throw new RuntimeException(e);
+		}
 	}
 
 	public synchronized void newLogs() throws IOException {
-		if( isMerging )
-			throw new RuntimeException("merging");
+		getMergeLock();
+		try {
+			newLogs2();
+		} finally {
+			mergeLock.unlock();
+		}
+	}
+
+	private void newLogs2() throws IOException {
 		logger.info("building new logs");
 		logs.clear();
 		for( int i=0; i<2; i++ ) {
@@ -182,6 +195,8 @@
 
 	private void mergeLogs() throws IOException {
 		//logger.info("merge");
+		if( logs.size() <= 3 )
+			return;
 		LogFile first = logs.get(0);
 		LogFile second = logs.get(1);
 		long lastTime = second.file.lastModified();
@@ -210,7 +225,7 @@
 		} catch(IOException e) {
 			throw new RuntimeException(e);
 		} finally {
-			isNotMerging();
+			mergeLock.unlock();
 		}
 	} };
 
@@ -236,18 +251,19 @@
 
 	private volatile boolean isChecking = false;
 
-	public void check(SortField sortField) throws IOException {
+	public boolean check(SortField sortField) throws IOException {
 		if( isChecking )
 			throw new RuntimeException("another check is running");
 		isChecking = true;
 		try {
-			doCheck(sortField);
+			return doCheck(sortField);
 		} finally {
 			isChecking = false;
 		}
 	}
 
-	protected void doCheck(SortField sortField) throws IOException {
+	protected boolean doCheck(SortField sortField) throws IOException {
+		boolean ok = true;
 		IndexReader indexReader;
 		List<LogInputStream> logReaders;
 		synchronized(this) {
@@ -255,24 +271,26 @@
 			logReaders = logReaders(logs);
 		}
 		try {
-			logger.info("check start");
+			//logger.info("check start");
 			indexWriter.check();
 			File dirFile = new File(logDir,"check");
 			IoUtils.deleteRecursively(dirFile);
 			Directory dir = FSDirectory.open(dirFile);
 			LuceneIndexWriter checkWriter = new LuceneIndexWriter( dir, indexWriter.goodConfig );
 			playLogs(logReaders,checkWriter);
-			logger.info("check lucene");
+			//logger.info("check lucene");
 			IndexReader checkReader = checkWriter.openReader();
 			if( sortField == null ) {
 				int nCheck = checkReader.numDocs();
 				int nOrig = indexReader.numDocs();
 				if( nCheck != nOrig ) {
 					logger.error("numDocs mismatch: lucene="+nOrig+" logs="+nCheck);
+					ok = false;
 				}
-				logger.info("numDocs="+nOrig);
+				//logger.info("numDocs="+nOrig);
 				if( hash(indexReader) != hash(checkReader) ) {
 					logger.error("hash mismatch");
+					ok = false;
 				}
 			} else {
 				Sort sort = new Sort(sortField);
@@ -291,25 +309,30 @@
 							logger.error(sortFieldName+" "+origFld+" not equal");
 							logger.error("lucene = "+origFields);
 							logger.error("logs = "+checkFields);
+							ok = false;
 						}
 						origFields = LuceneUtils.toMap(origIter.next());
 						checkFields = LuceneUtils.toMap(checkIter.next());
 					} else if( cmp < 0 ) {
 						logger.error(sortFieldName+" "+origFld+" found in lucene but not logs");
+						ok = false;
 						origFields = LuceneUtils.toMap(origIter.next());
 					} else {  // >
 						logger.error(sortFieldName+" "+checkFld+" found in logs but not lucene");
+						ok = false;
 						checkFields = LuceneUtils.toMap(checkIter.next());
 					}
 				}
 				while( origFields!=null ) {
 					Comparable origFld = (Comparable)origFields.get(sortFieldName);
 					logger.error(sortFieldName+" "+origFld+" found in lucene but not logs");
+					ok = false;
 					origFields = LuceneUtils.toMap(origIter.next());
 				}
 				while( checkFields!=null ) {
 					Comparable checkFld = (Comparable)checkFields.get(sortFieldName);
 					logger.error(sortFieldName+" "+checkFld+" found in logs but not lucene");
+					ok = false;
 					checkFields = LuceneUtils.toMap(checkIter.next());
 				}
 				//logger.info("check done");
@@ -317,10 +340,11 @@
 			checkReader.close();
 			checkWriter.close();
 			IoUtils.deleteRecursively(dirFile);
-			logger.info("check done");
+			//logger.info("check done");
 		} finally {
 			indexReader.close();
 		}
+		return ok;
 	}
 
 	private static abstract class HashCollector extends GoodCollector {
@@ -350,7 +374,7 @@
 	public synchronized void commit() throws IOException {
 		indexWriter.commit();
 		log.commit();
-		if( isMerging )
+		if( mergeLock.isLocked() )
 			return;
 		if( log.logFile.end() > logs.get(0).end() ) {
 			logs.add( newLogFile() );
@@ -358,7 +382,7 @@
 			setLog();
 		}
 		if( logs.size() > 3 ) {
-			isMerging = true;
+			getMergeLock();
 			new Thread(mergeLogs).start();
 //			mergeLogs.run();
 		}