# Copyright (c) 2010-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement

import cPickle as pickle
import os
import tempfile
import time
import unittest
from gzip import GzipFile
from shutil import rmtree

from test.unit import FakeLogger, mock as unit_mock

from swift.common import utils
from swift.common.utils import hash_path, mkdirs, normalize_timestamp
from swift.common import ring
from swift.obj import base as object_base
from swift.obj.server import DiskFile
def _create_test_ring(path):
    """Write a 3-replica, 7-device object ring into *path* and load it.

    The ring layout is stolen from common/test_ring.py; the serialized
    object.ring.gz is what ring.Ring expects to find on disk.

    :param path: directory in which to create object.ring.gz
    :returns: a ring.Ring loaded from the freshly written ring file
    """
    testgz = os.path.join(path, 'object.ring.gz')
    intended_replica2part2dev_id = [
        [0, 1, 2, 3, 4, 5, 6],
        [1, 2, 3, 0, 5, 6, 4],
        [2, 3, 0, 1, 6, 4, 5]]
    intended_devs = [
        {'id': 0, 'device': 'sda', 'zone': 0, 'ip': '127.0.0.0', 'port': 6000},
        {'id': 1, 'device': 'sda', 'zone': 1, 'ip': '127.0.0.1', 'port': 6000},
        {'id': 2, 'device': 'sda', 'zone': 2, 'ip': '127.0.0.2', 'port': 6000},
        {'id': 3, 'device': 'sda', 'zone': 4, 'ip': '127.0.0.3', 'port': 6000},
        {'id': 4, 'device': 'sda', 'zone': 5, 'ip': '127.0.0.4', 'port': 6000},
        {'id': 5, 'device': 'sda', 'zone': 6,
         'ip': 'fe80::202:b3ff:fe1e:8329', 'port': 6000},
        {'id': 6, 'device': 'sda', 'zone': 7,
         'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334', 'port': 6000}]
    intended_part_shift = 30
    intended_reload_time = 15
    # Serialize the ring data as the gzipped pickle the Ring loader reads.
    pickle.dump(
        ring.RingData(intended_replica2part2dev_id, intended_devs,
                      intended_part_shift),
        GzipFile(testgz, 'wb'), protocol=2)
    return ring.Ring(path, ring_name='object',
                     reload_time=intended_reload_time)
class TestObjectBase(unittest.TestCase):
    """Tests for the hashing helpers in swift.obj.base.

    Covers hash_suffix (reclaim / quarantine behaviour), invalidate_hash
    and get_hashes against a throwaway on-disk object layout.
    """

    def setUp(self):
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = ''
        # Setup a test ring (stolen from common/test_ring.py)
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)
        os.mkdir(os.path.join(self.devices, 'sda'))
        self.objects = os.path.join(self.devices, 'sda', 'objects')
        os.mkdir(self.objects)
        self.parts = {}
        for part in ['0', '1', '2', '3']:
            self.parts[part] = os.path.join(self.objects, part)
            os.mkdir(os.path.join(self.objects, part))
        self.ring = _create_test_ring(self.testdir)
        self.conf = dict(
            swift_dir=self.testdir, devices=self.devices, mount_check='false',
            timeout='300', stats_interval='1')

    def tearDown(self):
        rmtree(self.testdir, ignore_errors=1)

    def test_hash_suffix_hash_dir_is_file_quarantine(self):
        # A *file* where the hash dir should be must get quarantined.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(os.path.dirname(df.datadir))
        open(df.datadir, 'wb').close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        orig_quarantine_renamer = object_base.quarantine_renamer
        called = [False]

        def wrapped(*args, **kwargs):
            called[0] = True
            return orig_quarantine_renamer(*args, **kwargs)

        try:
            object_base.quarantine_renamer = wrapped
            object_base.hash_suffix(whole_path_from, 101)
        finally:
            # Always restore the module-level hook, even if hash_suffix raises.
            object_base.quarantine_renamer = orig_quarantine_renamer
        self.assertTrue(called[0])

    def test_hash_suffix_one_file(self):
        # A lone tombstone survives reclaim_age=101 but not reclaim_age=99.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        f = open(
            os.path.join(df.datadir,
                         normalize_timestamp(time.time() - 100) + '.ts'),
            'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        object_base.hash_suffix(whole_path_from, 101)
        self.assertEquals(len(os.listdir(self.parts['0'])), 1)

        object_base.hash_suffix(whole_path_from, 99)
        self.assertEquals(len(os.listdir(self.parts['0'])), 0)

    def test_hash_suffix_multi_file_one(self):
        # Write .meta/.data/.ts at several ages; the newest tombstone wins.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        for tdiff in [1, 50, 100, 500]:
            for suff in ['.meta', '.data', '.ts']:
                f = open(
                    os.path.join(
                        df.datadir,
                        normalize_timestamp(int(time.time()) - tdiff) + suff),
                    'wb')
                f.write('1234567890')
                f.close()

        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)

        object_base.hash_suffix(whole_path_from, 99)
        # only the tombstone should be left
        self.assertEquals(len(os.listdir(whole_hsh_path)), 1)

    def test_hash_suffix_multi_file_two(self):
        # Newest entries are a .meta and a .data (no newer .ts): both survive.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        for tdiff in [1, 50, 100, 500]:
            suffs = ['.meta', '.data']
            if tdiff > 50:
                suffs.append('.ts')
            for suff in suffs:
                f = open(
                    os.path.join(
                        df.datadir,
                        normalize_timestamp(int(time.time()) - tdiff) + suff),
                    'wb')
                f.write('1234567890')
                f.close()

        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)

        object_base.hash_suffix(whole_path_from, 99)
        # only the meta and data should be left
        self.assertEquals(len(os.listdir(whole_hsh_path)), 2)

    def test_invalidate_hash(self):

        def assertFileData(file_path, data):
            with open(file_path, 'r') as fp:
                fdata = fp.read()
                self.assertEquals(pickle.loads(fdata), pickle.loads(data))

        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hashes_file = os.path.join(self.objects, '0',
                                   object_base.HASH_FILE)
        # test that non existent file except caught
        self.assertEquals(object_base.invalidate_hash(whole_path_from),
                          None)
        # test that hashes get cleared
        check_pickle_data = pickle.dumps({data_dir: None},
                                         object_base.PICKLE_PROTOCOL)
        for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
            with open(hashes_file, 'wb') as fp:
                pickle.dump(data_hash, fp, object_base.PICKLE_PROTOCOL)
            object_base.invalidate_hash(whole_path_from)
            assertFileData(hashes_file, check_pickle_data)

    def test_get_hashes(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(
                os.path.join(df.datadir,
                             normalize_timestamp(time.time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_base.get_hashes(part)
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)
        hashed, hashes = object_base.get_hashes(part, do_listdir=True)
        self.assertEquals(hashed, 0)
        self.assert_('a83' in hashes)
        hashed, hashes = object_base.get_hashes(part, recalculate=['a83'])
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)

    def test_get_hashes_bad_dir(self):
        # A non-suffix entry ('bad') in the partition dir must be skipped.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(os.path.join(self.objects, '0', 'bad'), 'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_base.get_hashes(part)
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)
        self.assert_('bad' not in hashes)

    def test_get_hashes_unmodified(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(
                os.path.join(df.datadir,
                             normalize_timestamp(time.time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_base.get_hashes(part)
        i = [0]

        # Constant mtime => hashes file looks unmodified between checks.
        def getmtime(filename):
            i[0] += 1
            return 1
        with unit_mock({'os.path.getmtime': getmtime}):
            hashed, hashes = object_base.get_hashes(
                part, recalculate=['a83'])
        self.assertEquals(i[0], 2)

    def test_get_hashes_unmodified_and_zero_bytes(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        part = os.path.join(self.objects, '0')
        open(os.path.join(part, object_base.HASH_FILE), 'w')
        # Now the hash file is zero bytes.
        i = [0]

        def getmtime(filename):
            i[0] += 1
            return 1
        with unit_mock({'os.path.getmtime': getmtime}):
            hashed, hashes = object_base.get_hashes(
                part, recalculate=[])
        # getmtime will actually not get called. Initially, the pickle.load
        # will raise an exception first and later, force_rewrite will
        # short-circuit the if clause to determine whether to write out a fresh
        # hashes_file.
        self.assertEquals(i[0], 0)
        self.assertTrue('a83' in hashes)

    def test_get_hashes_modified(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(
                os.path.join(df.datadir,
                             normalize_timestamp(time.time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_base.get_hashes(part)
        i = [0]

        # Increasing mtime (up to 3) => hashes file looks modified, forcing
        # get_hashes to re-read until the mtime stabilizes.
        def getmtime(filename):
            if i[0] < 3:
                i[0] += 1
            return i[0]
        with unit_mock({'os.path.getmtime': getmtime}):
            hashed, hashes = object_base.get_hashes(
                part, recalculate=['a83'])
        self.assertEquals(i[0], 3)
# Script entry point: run this module's tests directly.
if __name__ == '__main__':
    unittest.main()