/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.update;

import java.io.IOException;
import java.net.URL;
import java.util.HashSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.search.Query;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.UpdateHandler;
/**
 * <code>DirectUpdateHandler</code> implements an UpdateHandler where documents are added
 * directly to the main lucene index as opposed to adding to a separate smaller index.
 * For this reason, not all combinations to/from pending and committed are supported.
 *
 * @version $Id: DirectUpdateHandler.java 1065312 2011-01-30 16:08:25Z rmuir $
 *
 * @deprecated Use {@link DirectUpdateHandler2} instead. This is only kept around for back-compatibility (way back).
 */
public class DirectUpdateHandler extends UpdateHandler {
57
// the set of ids in the "pending set" (those docs that have been added, but
58
// that are not yet visible.
59
final HashSet<String> pset;
61
SolrIndexSearcher searcher;
62
int numAdds=0; // number of docs added to the pending set
63
int numPending=0; // number of docs currently in this pending set
64
int numDeleted=0; // number of docs deleted or
67
public DirectUpdateHandler(SolrCore core) throws IOException {
69
pset = new HashSet<String>(256);
73
protected void openWriter() throws IOException {
75
writer = createMainIndexWriter("DirectUpdateHandler", false);
79
protected void closeWriter() throws IOException {
81
if (writer!=null) writer.close();
83
// TODO: if an exception causes the writelock to not be
84
// released, we could delete it here.
89
protected void openSearcher() throws IOException {
91
searcher = core.newSearcher("DirectUpdateHandler");
95
protected void closeSearcher() throws IOException {
97
if (searcher!=null) searcher.close();
99
// TODO: if an exception causes the writelock to not be
100
// released, we could delete it here.
105
protected void doAdd(Document doc) throws IOException {
106
closeSearcher(); openWriter();
107
writer.addDocument(doc);
110
protected boolean existsInIndex(String indexedId) throws IOException {
111
if (idField == null) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Operation requires schema to have a unique key field");
115
IndexReader ir = searcher.getReader();
116
TermDocs tdocs = null;
117
boolean exists=false;
119
tdocs = ir.termDocs(idTerm(indexedId));
120
if (tdocs.next()) exists=true;
122
try { if (tdocs != null) tdocs.close(); } catch (Exception e) {}
128
protected int deleteInIndex(String indexedId) throws IOException {
129
if (idField == null) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Operation requires schema to have a unique key field");
131
closeWriter(); openSearcher();
132
IndexReader ir = searcher.getReader();
133
TermDocs tdocs = null;
136
Term term = new Term(idField.getName(), indexedId);
137
num = ir.deleteDocuments(term);
138
if (core.log.isTraceEnabled()) {
139
core.log.trace( core.getLogId()+"deleted " + num + " docs matching id " + idFieldType.indexedToReadable(indexedId));
142
try { if (tdocs != null) tdocs.close(); } catch (Exception e) {}
147
protected void overwrite(String indexedId, Document doc) throws IOException {
148
if (indexedId ==null) indexedId =getIndexedId(doc);
149
deleteInIndex(indexedId);
153
/************** Direct update handler - pseudo code ***********
def add(doc, id, allowDups, overwritePending, overwriteCommitted):
  if not overwritePending and not overwriteCommitted:
    #special case... no need to check pending set, and we don't keep
    #any state around about this addition
    committed[id]=doc  #100

  #if no dups allowed, we must check the *current* index (pending and committed)
  if not committed[id]: committed[id]=doc  #000

  #001 (searchd addConditionally)
  if not allowDups and not overwritePending and pending[id]: return
  del committed[id]  #delete from pending and committed 111 011

  # NOTE(review): parts of this pseudo-code were lost in extraction.
****************************************************************/
// could return the number of docs deleted, but is that always possible to know???
174
public void delete(DeleteUpdateCommand cmd) throws IOException {
175
if (!cmd.fromPending && !cmd.fromCommitted)
176
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"meaningless command: " + cmd);
177
if (!cmd.fromPending || !cmd.fromCommitted)
178
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"operation not supported" + cmd);
179
String indexedId = idFieldType.toInternal(cmd.id);
181
deleteInIndex(indexedId);
182
pset.remove(indexedId);
186
// TODO - return number of docs deleted?
187
// Depending on implementation, we may not be able to immediately determine num...
189
public void deleteByQuery(DeleteUpdateCommand cmd) throws IOException {
190
if (!cmd.fromPending && !cmd.fromCommitted)
191
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"meaningless command: " + cmd);
192
if (!cmd.fromPending || !cmd.fromCommitted)
193
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"operation not supported: " + cmd);
195
Query q = QueryParsing.parseQuery(cmd.query, schema);
199
closeWriter(); openSearcher();
201
// if we want to count the number of docs that were deleted, then
202
// we need a new instance of the DeleteHitCollector
203
final DeleteHitCollector deleter = new DeleteHitCollector(searcher);
204
searcher.search(q, null, deleter);
205
totDeleted = deleter.deleted;
208
if (core.log.isDebugEnabled()) {
209
core.log.debug(core.getLogId()+"docs deleted:" + totDeleted);
214
/**************** old hit collector... new one is in base class
// final DeleteHitCollector deleter = new DeleteHitCollector();
class DeleteHitCollector extends HitCollector {
  public int deleted=0;
  public void collect(int doc, float score) {
    try {
      searcher.getReader().delete(doc);
      deleted++;
    } catch (IOException e) {
      try { closeSearcher(); } catch (Exception ee) { SolrException.log(SolrCore.log,ee); }
      SolrException.log(SolrCore.log,e);
      throw new SolrException( SolrException.StatusCode.SERVER_ERROR,"Error deleting doc# "+doc,e);
    }
  }
}
NOTE(review): parts of this commented-out code were lost in extraction;
the try block, deleted++, and closing braces above are reconstructed.
***************************/
public int mergeIndexes(MergeIndexesCommand cmd) throws IOException {
233
throw new SolrException(
234
SolrException.ErrorCode.BAD_REQUEST,
235
"DirectUpdateHandler doesn't support mergeIndexes. Use DirectUpdateHandler2 instead.");
239
public void commit(CommitUpdateCommand cmd) throws IOException {
240
Future[] waitSearcher = null;
241
if (cmd.waitSearcher) {
242
waitSearcher = new Future[1];
245
synchronized (this) {
247
closeSearcher(); // flush any deletes
248
if (cmd.optimize || cmd.expungeDeletes) {
249
openWriter(); // writer needs to be open to optimize
250
if(cmd.optimize) writer.optimize(cmd.maxOptimizeSegments);
251
if(cmd.expungeDeletes) writer.expungeDeletes(cmd.expungeDeletes);
255
callPostCommitCallbacks();
257
callPostOptimizeCallbacks();
260
core.getSearcher(true,false,waitSearcher);
263
if (waitSearcher!=null && waitSearcher[0] != null) {
265
waitSearcher[0].get();
266
} catch (InterruptedException e) {
267
SolrException.log(log,e);
268
} catch (ExecutionException e) {
269
SolrException.log(log,e);
280
public void rollback(RollbackUpdateCommand cmd) throws IOException {
281
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
282
"DirectUpdateHandler doesn't support rollback. Use DirectUpdateHandler2 instead.");
286
///////////////////////////////////////////////////////////////////
/////////////////// helper method for each add type ///////////////
///////////////////////////////////////////////////////////////////
protected int addNoOverwriteNoDups(AddUpdateCommand cmd) throws IOException {
291
if (cmd.indexedId ==null) {
292
cmd.indexedId =getIndexedId(cmd.doc);
294
synchronized (this) {
295
if (existsInIndex(cmd.indexedId)) return 0;
301
protected int addConditionally(AddUpdateCommand cmd) throws IOException {
302
if (cmd.indexedId ==null) {
303
cmd.indexedId =getIndexedId(cmd.doc);
306
if (pset.contains(cmd.indexedId)) return 0;
307
// since case 001 is currently the only case to use pset, only add
308
// to it in that instance.
309
pset.add(cmd.indexedId);
310
overwrite(cmd.indexedId,cmd.doc);
316
// overwrite both pending and committed
317
protected synchronized int overwriteBoth(AddUpdateCommand cmd) throws IOException {
318
overwrite(cmd.indexedId, cmd.doc);
323
// add without checking
324
protected synchronized int allowDups(AddUpdateCommand cmd) throws IOException {
331
public int addDoc(AddUpdateCommand cmd) throws IOException {
333
// if there is no ID field, use allowDups
334
if( idField == null ) {
335
cmd.allowDups = true;
336
cmd.overwriteCommitted = false;
337
cmd.overwritePending = false;
340
if (!cmd.allowDups && !cmd.overwritePending && !cmd.overwriteCommitted) {
341
return addNoOverwriteNoDups(cmd);
342
} else if (!cmd.allowDups && !cmd.overwritePending && cmd.overwriteCommitted) {
343
return addConditionally(cmd);
344
} else if (!cmd.allowDups && cmd.overwritePending && !cmd.overwriteCommitted) {
345
// return overwriteBoth(cmd);
346
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"unsupported param combo:" + cmd);
347
} else if (!cmd.allowDups && cmd.overwritePending && cmd.overwriteCommitted) {
348
return overwriteBoth(cmd);
349
} else if (cmd.allowDups && !cmd.overwritePending && !cmd.overwriteCommitted) {
350
return allowDups(cmd);
351
} else if (cmd.allowDups && !cmd.overwritePending && cmd.overwriteCommitted) {
352
// return overwriteBoth(cmd);
353
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"unsupported param combo:" + cmd);
354
} else if (cmd.allowDups && cmd.overwritePending && !cmd.overwriteCommitted) {
355
// return overwriteBoth(cmd);
356
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"unsupported param combo:" + cmd);
357
} else if (cmd.allowDups && cmd.overwritePending && cmd.overwriteCommitted) {
358
return overwriteBoth(cmd);
360
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"unsupported param combo:" + cmd);
364
public void close() throws IOException {
373
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
public String getName() {
378
return DirectUpdateHandler.class.getName();
381
public String getVersion() {
382
return SolrCore.version;
385
public String getDescription() {
386
return "Update handler that directly changes the on-disk main lucene index";
389
public Category getCategory() {
390
return Category.CORE;
393
public String getSourceId() {
394
return "$Id: DirectUpdateHandler.java 1065312 2011-01-30 16:08:25Z rmuir $";
397
public String getSource() {
398
return "$URL: http://svn.apache.org/repos/asf/lucene/dev/tags/lucene_solr_3_5_0/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler.java $";
401
public URL[] getDocs() {
405
public NamedList getStatistics() {
406
NamedList lst = new SimpleOrderedMap();