import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.index.Term;
import java.util.BitSet;
import java.io.IOException;
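
// Filter that hides documents whose "level" field marks them as SECURITY_ADVANCED.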
class AdvancedFilter extends Filter
{
    public BitSet bits(IndexReader reader) throws IOException
    {
        final int SECURITY_ADVANCED = 0;
        // Start by allowing every document in the index.
        final BitSet bits = new BitSet(reader.maxDoc());
        bits.set(0, reader.maxDoc());
        // Then clear the bit of every document whose "level" field is SECURITY_ADVANCED.
        Term term = new Term("level", SECURITY_ADVANCED + "");
        IndexSearcher searcher = new IndexSearcher(reader);
        Hits hits = searcher.search(new TermQuery(term));
        for (int i = 0; i < hits.length(); i++)
            bits.set(hits.id(i), false);
        return bits;
    }
}
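
// Demo: index three books with different security levels, then run a filtered search.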
public class TestSearch
{
    public static final String indexpath = "/root/index";
    public static final int SECURITY_ADVANCED = 0;
    public static final int SECURITY_MIDDLE = 1;
    public static final int SECURITY_NORMAL = 2;

    public static void main(String[] args) throws Exception
    {
        // Build the index at indexpath (true = create a new index, replacing any existing one).
        IndexWriter writer = new IndexWriter(indexpath, new StandardAnalyzer(), true);
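
        // Three sample books: each stores a booknum, the searchable content, and a security level.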
        Document doc1 = new Document();
        Field field1 = new Field("booknum", "00001", Field.Store.YES, Field.Index.UN_TOKENIZED);
        Field field2 = new Field("content", "fan fei", Field.Store.YES, Field.Index.TOKENIZED);
        Field field3 = new Field("level", SECURITY_ADVANCED + "", Field.Store.YES, Field.Index.UN_TOKENIZED);
        doc1.add(field1);
        doc1.add(field2);
        doc1.add(field3);

        Document doc2 = new Document();
        Field field4 = new Field("booknum", "00002", Field.Store.YES, Field.Index.UN_TOKENIZED);
        Field field5 = new Field("content", "fan song", Field.Store.YES, Field.Index.TOKENIZED);
        Field field6 = new Field("level", SECURITY_MIDDLE + "", Field.Store.YES, Field.Index.UN_TOKENIZED);
        doc2.add(field4);
        doc2.add(field5);
        doc2.add(field6);

        Document doc3 = new Document();
        Field field7 = new Field("booknum", "00003", Field.Store.YES, Field.Index.UN_TOKENIZED);
        Field field8 = new Field("content", "fan ning", Field.Store.YES, Field.Index.TOKENIZED);
        Field field9 = new Field("level", SECURITY_NORMAL + "", Field.Store.YES, Field.Index.UN_TOKENIZED);
        doc3.add(field7);
        doc3.add(field8);
        doc3.add(field9);

        writer.addDocument(doc1);
        writer.addDocument(doc2);
        writer.addDocument(doc3);
        writer.close();
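
        // Search the "content" field for "fan"; AdvancedFilter drops advanced-level books from the results.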
        IndexSearcher searcher = new IndexSearcher(indexpath);
        QueryParser parser = new QueryParser("content", new StandardAnalyzer());
        Query query = parser.parse("fan");
        Hits hits = searcher.search(query, new AdvancedFilter());
        for (int i = 0; i < hits.length(); i++)
            System.out.println(hits.doc(i));
        searcher.close();
    }
}