# See the file LICENSE for redistribution information.
# Copyright (c) 1996-2002
# Sleepycat Software. All rights reserved.
# TEST Test using set_dup_compare.
# TEST Use the first 10,000 entries from the dictionary.
# TEST Insert each with self as key and data; retrieve each.
# TEST After all are entered, retrieve all; compare output to original.
# TEST Close file, reopen, do retrieve and re-verify.
proc test094 { method {nentries 10000} {ndups 10} {tnum "94"} args} {
# Test an application-supplied duplicate comparator
# (-dupcompare test094_cmp): insert $nentries keys, each with
# $ndups sorted duplicates, verify via get/traverse, then repeat
# with file-content data items big enough to force offpage dups.
#
# NOTE(review): this span is a fragmentary extraction -- closing
# braces, txn begins, and other interior lines are missing, and
# bare line-number artifacts (e.g. "19") are interleaved between
# statements.  Code is left byte-identical; comments only.
19
set dbargs [convert_args $method $args]
20
set omethod [convert_method $method]
22
# Custom dup comparators apply only to btree/hash here.
if { [is_btree $method] != 1 && [is_hash $method] != 1 } {
23
puts "Test0$tnum: skipping for method $method."
28
set eindex [lsearch -exact $dbargs "-env"]
29
# Create the database and open the dictionary
31
# If we are using an env, then testfile should just be the db name.
32
# Otherwise it is the test directory and the name.
33
if { $eindex == -1 } {
34
set testfile $testdir/test0$tnum-a.db
37
set testfile test0$tnum-a.db
39
set env [lindex $dbargs $eindex]
40
set rpcenv [is_rpcenv $env]
42
puts "Test0$tnum: skipping for RPC"
45
set txnenv [is_txnenv $env]
47
append dbargs " -auto_commit "
48
# Shrink the workload for txn envs -- presumably to bound log
# size; TODO confirm against reduce_dups.
if { $nentries == 10000 } {
51
reduce_dups nentries ndups
53
set testdir [get_home $env]
55
puts "Test0$tnum: $method ($args) $nentries \
56
with $ndups dups using dupcompare"
60
set db [eval {berkdb_open_noerr -dupcompare test094_cmp \
61
-dup -dupsort -create -mode 0644} $omethod $dbargs {$testfile}]
62
error_check_good dbopen [is_valid_db $db] TRUE
69
puts "\tTest0$tnum.a: $nentries put/get duplicates loop"
70
# Here is the loop where we put and get each key/data pair
73
for {set i 0} {$i < $ndups} {incr i} {
74
set dlist [linsert $dlist 0 $i]
76
# One dictionary word per key, $ndups duplicates per key.
while { [gets $did str] != -1 && $count < $nentries } {
78
for {set i 0} {$i < $ndups} {incr i} {
82
error_check_good txn [is_valid_txn $t $env] TRUE
85
set ret [eval {$db put} \
86
$txn $pflags {$key [chop_data $omethod $data]}]
87
error_check_good put $ret 0
89
error_check_good txn [$t commit] 0
93
# A get on the key should return all $ndups duplicates.
set ret [eval {$db get} $gflags {$key}]
94
error_check_good get [llength $ret] $ndups
98
# Now we will get each key from the DB and compare the results
100
puts "\tTest0$tnum.b: traverse checking duplicates before close"
101
if { $txnenv == 1 } {
103
error_check_good txn [is_valid_txn $t $env] TRUE
106
dup_check $db $txn $t1 $dlist
107
if { $txnenv == 1 } {
108
error_check_good txn [$t commit] 0
110
error_check_good db_close [$db close] 0
112
# Set up second testfile so truncate flag is not needed.
113
# If we are using an env, then testfile should just be the db name.
114
# Otherwise it is the test directory and the name.
115
if { $eindex == -1 } {
116
set testfile $testdir/test0$tnum-b.db
119
set testfile test0$tnum-b.db
120
set env [lindex $dbargs $eindex]
121
set testdir [get_home $env]
123
cleanup $testdir $env
126
# Test dupcompare with data items big enough to force offpage dups.
128
puts "\tTest0$tnum.c: big key put/get dup loop key=filename data=filecontents"
129
# NOTE(review): second open uses berkdb_open (not _noerr),
# unlike the first open above -- verify this is intentional.
set db [eval {berkdb_open -dupcompare test094_cmp -dup -dupsort \
130
-create -mode 0644} $omethod $dbargs $testfile]
131
error_check_good dbopen [is_valid_db $db] TRUE
133
# Here is the loop where we put and get each key/data pair
134
set file_list [get_file_list 1]
135
if { [llength $file_list] > $nentries } {
136
set file_list [lrange $file_list 1 $nentries]
140
# Key = filename, data = file contents (binary-safe read).
foreach f $file_list {
142
fconfigure $fid -translation binary
147
for {set i 0} {$i < $ndups} {incr i} {
149
if { $txnenv == 1 } {
151
error_check_good txn [is_valid_txn $t $env] TRUE
154
set ret [eval {$db put} \
155
$txn $pflags {$key [chop_data $omethod $data]}]
156
error_check_good put $ret 0
157
if { $txnenv == 1 } {
158
error_check_good txn [$t commit] 0
162
set ret [eval {$db get} $gflags {$key}]
163
error_check_good get [llength $ret] $ndups
167
puts "\tTest0$tnum.d: traverse checking duplicates before close"
168
if { $txnenv == 1 } {
170
error_check_good txn [is_valid_txn $t $env] TRUE
173
dup_file_check $db $txn $t1 $dlist
174
if { $txnenv == 1 } {
175
error_check_good txn [$t commit] 0
176
set testdir [get_home $env]
178
error_check_good db_close [$db close] 0
180
# Clean up the test directory, since there's currently
181
# no way to specify a dup_compare function to berkdb dbverify
182
# and without one it will fail.
183
cleanup $testdir $env
# Simple dup comparison.
proc test094_cmp { a b } {
	# Duplicate comparator: compare the arguments in reversed
	# order ($b against $a), inverting the default lexicographic
	# sort of duplicates under -dupsort.
	set order [string compare $b $a]
	return $order
}
# Check if each key appears exactly [llength dlist] times in the file with
192
# the duplicate tags matching those that appear in dlist.
193
proc test094_dup_big { db txn tmpfile dlist {extra 0}} {
# Walk $db with a cursor, dumping keys to $tmpfile and checking
# that each key's duplicates carry the ids listed in $dlist.
# If $extra is nonzero, an additional trailing dup per key (e.g.
# an overflow entry) is expected and its id verified.
#
# NOTE(review): fragmentary extraction -- loop/brace structure is
# incomplete, bare line-number artifacts are interleaved, and the
# ends of the while loop and proc (and the close of $outf) fall
# outside the visible span.  Code left byte-identical.
196
set outf [open $tmpfile w]
197
# Now we will get each key from the DB and dump to outfile
198
set c [eval {$db cursor} $txn]
201
# Cursor scan; $done is presumably set when "-next" returns an
# empty record -- TODO confirm (its setup lines are missing).
while { $done != 1} {
203
set rec [$c get "-next"]
204
if { [string length $rec] == 0 } {
208
set key [lindex [lindex $rec 0] 0]
209
set fulldata [lindex [lindex $rec 0] 1]
210
set id [id_of $fulldata]
211
set d [data_of $fulldata]
212
if { [string compare $key $lastkey] != 0 && \
213
$id != [lindex $dlist 0] } {
214
set e [lindex $dlist 0]
216
$key, expected dup id $e, got $id"
218
error_check_good dupget.data $d $key
219
error_check_good dupget.id $id $did
223
# Some tests add an extra dup (like overflow entries)
224
# Check id if it exists.
227
set rec [$c get "-next"]
228
if { [string length $rec] != 0 } {
229
set key [lindex [lindex $rec 0] 0]
231
# If this key has no extras, go back for
233
if { [string compare $key $lastkey] != 0 } {
235
set rec [$c get "-prev"]
237
set fulldata [lindex [lindex $rec 0] 1]
238
set id [id_of $fulldata]
239
set d [data_of $fulldata]
240
error_check_bad dupget.data1 $d $key
241
error_check_good dupget.id1 $id $extra
250
error_check_good curs_close [$c close] 0