1
package org.apache.lucene.index;
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
34
public class TestPerSegmentDeletes extends LuceneTestCase {
35
public void testDeletes1() throws Exception {
36
//IndexWriter.debug2 = System.out;
37
Directory dir = new MockDirectoryWrapper(new Random(), new RAMDirectory());
38
IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_CURRENT,
39
new MockAnalyzer(random));
40
iwc.setMergeScheduler(new SerialMergeScheduler());
41
iwc.setMaxBufferedDocs(5000);
42
iwc.setRAMBufferSizeMB(100);
43
RangeMergePolicy fsmp = new RangeMergePolicy(false);
44
iwc.setMergePolicy(fsmp);
45
IndexWriter writer = new IndexWriter(dir, iwc);
46
writer.setInfoStream(VERBOSE ? System.out : null);
47
for (int x = 0; x < 5; x++) {
48
writer.addDocument(DocHelper.createDocument(x, "1", 2));
49
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
51
//System.out.println("commit1");
53
assertEquals(1, writer.segmentInfos.size());
54
for (int x = 5; x < 10; x++) {
55
writer.addDocument(DocHelper.createDocument(x, "2", 2));
56
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
58
//System.out.println("commit2");
60
assertEquals(2, writer.segmentInfos.size());
62
for (int x = 10; x < 15; x++) {
63
writer.addDocument(DocHelper.createDocument(x, "3", 2));
64
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
67
writer.deleteDocuments(new Term("id", "1"));
69
writer.deleteDocuments(new Term("id", "11"));
71
// flushing without applying deletes means
72
// there will still be deletes in the segment infos
73
writer.flush(false, false);
74
assertTrue(writer.bufferedDeletesStream.any());
76
// get reader flushes pending deletes
77
// so there should not be anymore
78
IndexReader r1 = writer.getReader();
79
assertFalse(writer.bufferedDeletesStream.any());
82
// delete id:2 from the first segment
83
// merge segments 0 and 1
84
// which should apply the delete id:2
85
writer.deleteDocuments(new Term("id", "2"));
86
writer.flush(false, false);
92
assertEquals(2, writer.segmentInfos.size());
94
// id:2 shouldn't exist anymore because
95
// it's been applied in the merge and now it's gone
96
IndexReader r2 = writer.getReader();
97
int[] id2docs = toDocsArray(new Term("id", "2"), r2);
98
assertTrue(id2docs == null);
102
// added docs are in the ram buffer
103
for (int x = 15; x < 20; x++) {
104
writer.addDocument(TestIndexWriterReader.createDocument(x, "4", 2));
105
System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
107
assertTrue(writer.numRamDocs() > 0);
108
// delete from the ram buffer
109
writer.deleteDocuments(new Term("id", Integer.toString(13)));
111
Term id3 = new Term("id", Integer.toString(3));
113
// delete from the 1st segment
114
writer.deleteDocuments(id3);
116
assertTrue(writer.numRamDocs() > 0);
119
// .println("segdels1:" + writer.docWriter.deletesToString());
121
//assertTrue(writer.docWriter.segmentDeletes.size() > 0);
123
// we cause a merge to happen
127
System.out.println("maybeMerge "+writer.segmentInfos);
129
SegmentInfo info0 = writer.segmentInfos.info(0);
130
SegmentInfo info1 = writer.segmentInfos.info(1);
133
System.out.println("maybeMerge after "+writer.segmentInfos);
134
// there should be docs in RAM
135
assertTrue(writer.numRamDocs() > 0);
137
// assert we've merged the 1 and 2 segments
138
// and still have a segment leftover == 2
139
assertEquals(2, writer.segmentInfos.size());
140
assertFalse(segThere(info0, writer.segmentInfos));
141
assertFalse(segThere(info1, writer.segmentInfos));
143
//System.out.println("segdels2:" + writer.docWriter.deletesToString());
145
//assertTrue(writer.docWriter.segmentDeletes.size() > 0);
147
IndexReader r = writer.getReader();
148
IndexReader r1 = r.getSequentialSubReaders()[0];
149
printDelDocs(r1.getDeletedDocs());
150
int[] docs = toDocsArray(id3, null, r);
151
System.out.println("id3 docs:"+Arrays.toString(docs));
152
// there shouldn't be any docs for id:3
153
assertTrue(docs == null);
158
// System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
159
//System.out.println("close");
165
static boolean hasPendingDeletes(SegmentInfos infos) {
166
for (SegmentInfo info : infos) {
167
if (info.deletes.any()) {
174
void part2(IndexWriter writer, RangeMergePolicy fsmp) throws Exception {
175
for (int x = 20; x < 25; x++) {
176
writer.addDocument(DocHelper.createDocument(x, "5", 2));
177
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
179
writer.flush(false, false);
180
for (int x = 25; x < 30; x++) {
181
writer.addDocument(DocHelper.createDocument(x, "5", 2));
182
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
184
writer.flush(false, false);
186
//System.out.println("infos3:"+writer.segmentInfos);
188
Term delterm = new Term("id", "8");
189
writer.deleteDocuments(delterm);
190
//System.out.println("segdels3:" + writer.docWriter.deletesToString());
197
// deletes for info1, the newly created segment from the
198
// merge should have no deletes because they were applied in
200
//SegmentInfo info1 = writer.segmentInfos.info(1);
201
//assertFalse(exists(info1, writer.docWriter.segmentDeletes));
203
//System.out.println("infos4:"+writer.segmentInfos);
204
//System.out.println("segdels4:" + writer.docWriter.deletesToString());
207
boolean segThere(SegmentInfo info, SegmentInfos infos) {
208
for (SegmentInfo si : infos) {
209
if (si.name.equals(info.name)) return true;
214
public static int[] toDocsArray(Term term, IndexReader reader)
216
TermDocs termDocs = reader.termDocs();
218
return toArray(termDocs);
221
public static int[] toArray(TermDocs termDocs) throws IOException {
222
List<Integer> docs = new ArrayList<Integer>();
223
while (termDocs.next()) {
224
docs.add(termDocs.doc());
226
if (docs.size() == 0) {
229
return ArrayUtil.toIntArray(docs);
233
public class RangeMergePolicy extends MergePolicy {
234
boolean doMerge = false;
238
private final boolean useCompoundFile;
240
// Private: only the enclosing test constructs this policy.
private RangeMergePolicy(boolean useCompoundFile) {
  this.useCompoundFile = useCompoundFile;
}
245
// This policy holds no resources; nothing to release.
public void close() {}
248
public MergeSpecification findMerges(SegmentInfos segmentInfos)
249
throws CorruptIndexException, IOException {
250
MergeSpecification ms = new MergeSpecification();
252
OneMerge om = new OneMerge(segmentInfos.asList().subList(start, start + length));
261
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
262
int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
263
throws CorruptIndexException, IOException {
268
public MergeSpecification findForcedDeletesMerges(
269
SegmentInfos segmentInfos) throws CorruptIndexException, IOException {
274
public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) {
275
return useCompoundFile;