求:如何加快lucene写文件的效率(附代码)
package com.ffcs.nss.server.lucene;
import java.io.File;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import com.ffcs.nss.net.IP;
import com.ffcs.nss.util.Validators;
/**
 * Builds a Lucene full-text index (one document per {@link EventCommon})
 * under {@code dir/lucene}, tuned for fast bulk writes: a large RAM buffer
 * and a high merge factor so Lucene flushes/merges segments rarely during
 * indexing, and a single {@code optimize()} at the end.
 */
public class LuceneIndexer {

    protected final Log logger = LogFactory.getLog(getClass());

    /** Analyzer used when building the index. */
    private Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);

    /** High merge factor: fewer segment merges while bulk-indexing. */
    public static final int mergeFactor = 10000;

    /** RAM buffer size (MB) before buffered documents are flushed to disk. */
    public static final int RAM_BUFFER_SIZE = 256;

    private String dir;
    private String json;
    public static int num;
    private List<EventCommon> events = new ArrayList<EventCommon>();

    /**
     * @param dir    base directory; the index is written to {@code dir/lucene}
     * @param events records to index (may be empty or null; then make() is a no-op)
     */
    public LuceneIndexer(String dir, List<EventCommon> events) {
        this.dir = dir;
        this.events = events;
    }

    /**
     * (Re)creates the index under {@code dir/lucene} and adds one document
     * per event. IO failures are logged, not rethrown.
     */
    public void make() {
        // Robustness: original NPE'd on a null list.
        if (events == null || events.isEmpty()) {
            return;
        }
        IndexWriter fsWriter = null;
        Directory directory = null;
        try {
            directory = FSDirectory.open(new File(dir + File.separator + "lucene"));
            // BUGFIX: original passed an undeclared variable `create` (did not
            // compile). `true` recreates the index, which matches the intent.
            fsWriter = new IndexWriter(directory, analyzer, true,
                    IndexWriter.MaxFieldLength.LIMITED);
            // Bulk-indexing tuning: see class Javadoc.
            fsWriter.setMergeFactor(mergeFactor);
            fsWriter.setRAMBufferSizeMB(RAM_BUFFER_SIZE);
            for (EventCommon ec : events) {
                fsWriter.addDocument(toDocument(ec));
            }
            logger.debug("optimize start at " + System.currentTimeMillis());
            fsWriter.optimize();
            // BUGFIX: close only in finally; original closed the writer twice
            // (once here and again in the finally block).
        } catch (IOException e) {
            logger.error(e);
        } finally {
            if (fsWriter != null) {
                try {
                    // Closing the writer flushes any in-memory data to disk.
                    fsWriter.close();
                } catch (IOException e) {
                    logger.error(e);
                }
            }
            if (directory != null) {
                try {
                    // Release the index directory handle.
                    directory.close();
                } catch (IOException e) {
                    logger.error(e);
                }
            }
        }
    }

    /** Maps one event to a Lucene document; null field values become "". */
    private Document toDocument(EventCommon ec) {
        Document doc = new Document();
        addField(doc, "businessid", ec.getBusinessid());
        addField(doc, "placeid", ec.getPlaceid());
        addField(doc, "placename", ec.getPlacename());
        addField(doc, "domainid", ec.getDomainid());
        addField(doc, "domainname", ec.getDomainname());
        return doc;
    }

    /** Adds a stored, non-analyzed field, substituting "" for null values. */
    private static void addField(Document doc, String name, String value) {
        doc.add(new Field(name,
                Validators.isNotnull(value) ? value : "",
                Field.Store.YES, Field.Index.NOT_ANALYZED));
    }

    public String getDir() {
        return dir;
    }

    public void setDir(String dir) {
        this.dir = dir;
    }

    public String getJson() {
        return json;
    }

    public void setJson(String json) {
        this.json = json;
    }

    public int getNum() {
        return num;
    }

    public List<EventCommon> getEvents() {
        return events;
    }

    public void setEvents(List<EventCommon> events) {
        this.events = events;
    }
}
[解决办法]
该回复于2010-10-20 10:47:35被版主删除