~jaypipes/glance/checksum


Viewing changes to tests/unit/test_migrations.py

  • Committer: jaypipes at gmail
  • Date: 2011-03-23 14:20:13 UTC
  • mfrom: (75.18.2 bug730213)
  • Revision ID: jaypipes@gmail.com-20110323142013-94zssmvqynz0x56k
Merge bug730213 (Migration fixes and tests)

@@ -15,39 +15,240 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+"""
+Tests for database migrations. This test case reads the configuration
+file /tests/unit/test_migrations.conf for database connection settings
+to use in the tests. For each connection found in the config file,
+the test case runs a series of test cases to ensure that migrations work
+properly both upgrading and downgrading, and that no data loss occurs
+if possible.
+"""
+
+import ConfigParser
+import datetime
 import os
 import unittest
-
+import urlparse
+
+from migrate.versioning.repository import Repository
+from sqlalchemy import *
+from sqlalchemy.pool import NullPool
+
+from glance.common import exception
 import glance.registry.db.migration as migration_api
-import glance.registry.db.api as api
-import glance.common.config as config
+from tests.unit.test_misc import execute
 
 
 class TestMigrations(unittest.TestCase):
+
     """Test sqlalchemy-migrate migrations"""
 
+    TEST_DATABASES = {}
+    CONFIG_FILE_PATH = os.path.join('tests', 'unit',
+                                    'test_migrations.conf')
+    REPOSITORY_PATH = os.path.join('glance', 'registry', 'db', 'migrate_repo')
+    REPOSITORY = Repository(REPOSITORY_PATH)
+
+    def __init__(self, *args, **kwargs):
+        super(TestMigrations, self).__init__(*args, **kwargs)
+
     def setUp(self):
-        self.db_path = "glance_test_migration.sqlite"
-        sql_connection = os.environ.get('GLANCE_SQL_CONNECTION',
-                                        "sqlite:///%s" % self.db_path)
-
-        self.options = dict(sql_connection=sql_connection,
-                            verbose=False)
-        config.setup_logging(self.options, {})
+        # Load test databases from the config file. Only do this
+        # once. No need to re-run this on each test...
+        if not TestMigrations.TEST_DATABASES:
+            if os.path.exists(TestMigrations.CONFIG_FILE_PATH):
+                cp = ConfigParser.RawConfigParser()
+                try:
+                    cp.read(TestMigrations.CONFIG_FILE_PATH)
+                    defaults = cp.defaults()
+                    for key, value in defaults.items():
+                        TestMigrations.TEST_DATABASES[key] = value
+                except ConfigParser.ParsingError, e:
+                    print ("Failed to read test_migrations.conf config file. "
+                           "Got error: %s" % e)
+
+        self.engines = {}
+        for key, value in TestMigrations.TEST_DATABASES.items():
+            self.engines[key] = create_engine(value, poolclass=NullPool)
+
+        # We start each test case with a completely blank slate.
+        self._reset_databases()
 
     def tearDown(self):
-        api.configure_db(self.options)
-        api.unregister_models()
-
-    def test_db_sync_downgrade_then_upgrade(self):
-        migration_api.db_sync(self.options)
-
-        latest = migration_api.db_version(self.options)
-
-        migration_api.downgrade(self.options, latest - 1)
-        cur_version = migration_api.db_version(self.options)
-        self.assertEqual(cur_version, latest - 1)
-
-        migration_api.upgrade(self.options, cur_version + 1)
-        cur_version = migration_api.db_version(self.options)
-        self.assertEqual(cur_version, latest)
+        # We destroy the test data store between each test case,
+        # and recreate it, which ensures that we have no side-effects
+        # from the tests
+        self._reset_databases()
+
+    def _reset_databases(self):
+        for key, engine in self.engines.items():
+            conn_string = TestMigrations.TEST_DATABASES[key]
+            conn_pieces = urlparse.urlparse(conn_string)
+            if conn_string.startswith('sqlite'):
+                # We can just delete the SQLite database, which is
+                # the easiest and cleanest solution
+                db_path = conn_pieces.path.strip('/')
+                if os.path.exists(db_path):
+                    os.unlink(db_path)
+                # No need to recreate the SQLite DB. SQLite will
+                # create it for us if it's not there...
+            elif conn_string.startswith('mysql'):
+                # We can execute the MySQL client to destroy and re-create
+                # the MYSQL database, which is easier and less error-prone
+                # than using SQLAlchemy to do this via MetaData...trust me.
+                database = conn_pieces.path.strip('/')
+                loc_pieces = conn_pieces.netloc.split('@')
+                host = loc_pieces[1]
+                auth_pieces = loc_pieces[0].split(':')
+                user = auth_pieces[0]
+                password = ""
+                if len(auth_pieces) > 1:
+                    if auth_pieces[1].strip():
+                        password = "-p%s" % auth_pieces[1]
+                sql = ("drop database if exists %(database)s; "
+                       "create database %(database)s;") % locals()
+                cmd = ("mysql -u%(user)s %(password)s -h%(host)s "
+                       "-e\"%(sql)s\"") % locals()
+                exitcode, out, err = execute(cmd)
+                self.assertEqual(0, exitcode)
+
+    def test_walk_versions(self):
+        """
+        Walks all version scripts for each tested database, ensuring
+        that there are no errors in the version scripts for each engine
+        """
+        for key, engine in self.engines.items():
+            options = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
+            self._walk_versions(options)
+
+    def _walk_versions(self, options):
+        # Determine latest version script from the repo, then
+        # upgrade from 1 through to the latest, with no data
+        # in the databases. This just checks that the schema itself
+        # upgrades successfully.
+
+        # Assert we are not under version control...
+        self.assertRaises(exception.DatabaseMigrationError,
+                          migration_api.db_version,
+                          options)
+        # Place the database under version control
+        migration_api.version_control(options)
+
+        latest = migration_api.db_version(options)
+        self.assertEqual(0, latest)
+
+        for version in xrange(1, TestMigrations.REPOSITORY.latest + 1):
+            migration_api.upgrade(options, version)
+            cur_version = migration_api.db_version(options)
+            self.assertEqual(cur_version, version)
+
+        # Now walk it back down to 0 from the latest, testing
+        # the downgrade paths.
+        for version in xrange(TestMigrations.REPOSITORY.latest - 1, -1, -1):
+            migration_api.downgrade(options, version)
+            cur_version = migration_api.db_version(options)
+            self.assertEqual(cur_version, version)
+
+    def test_no_data_loss_2_to_3_to_2(self):
+        """
+        Here, we test that in the case when we moved a column "type" from the
+        base images table to be records in the image_properties table, that
+        we don't lose any data during the migration. Similarly, we test that
+        on downgrade, we don't lose any data, as the records are moved from
+        the image_properties table back into the base image table.
+        """
+        for key, engine in self.engines.items():
+            options = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
+            self._no_data_loss_2_to_3_to_2(engine, options)
+
+    def _no_data_loss_2_to_3_to_2(self, engine, options):
+        migration_api.version_control(options)
+        migration_api.upgrade(options, 2)
+
+        cur_version = migration_api.db_version(options)
+        self.assertEquals(2, cur_version)
+
+        # We are now on version 2. Check that the images table still
+        # contains the type column...
+
+        images_table = Table('images', MetaData(), autoload=True,
+                             autoload_with=engine)
+
+        image_properties_table = Table('image_properties', MetaData(),
+                                       autoload=True,
+                                       autoload_with=engine)
+
+        self.assertTrue('type' in images_table.c,
+                        "'type' column not found in images table columns! "
+                        "images table columns: %s"
+                        % images_table.c.keys())
+
+        conn = engine.connect()
+        sel = select([func.count("*")], from_obj=[images_table])
+        orig_num_images = conn.execute(sel).scalar()
+        sel = select([func.count("*")], from_obj=[image_properties_table])
+        orig_num_image_properties = conn.execute(sel).scalar()
+
+        now = datetime.datetime.now()
+        inserter = images_table.insert()
+        conn.execute(inserter, [
+                {'deleted': False, 'created_at': now,
+                 'updated_at': now, 'type': 'kernel',
+                 'status': 'active', 'is_public': True},
+                {'deleted': False, 'created_at': now,
+                 'updated_at': now, 'type': 'ramdisk',
+                 'status': 'active', 'is_public': True}])
+
+        sel = select([func.count("*")], from_obj=[images_table])
+        num_images = conn.execute(sel).scalar()
+        self.assertEqual(orig_num_images + 2, num_images)
+        conn.close()
+
+        # Now let's upgrade to 3. This should move the type column
+        # to the image_properties table as type properties.
+
+        migration_api.upgrade(options, 3)
+
+        cur_version = migration_api.db_version(options)
+        self.assertEquals(3, cur_version)
+
+        images_table = Table('images', MetaData(), autoload=True,
+                             autoload_with=engine)
+
+        self.assertTrue('type' not in images_table.c,
+                        "'type' column still found in images table columns! "
+                        "images table columns reported by metadata: %s\n"
+                        % images_table.c.keys())
+
+        image_properties_table = Table('image_properties', MetaData(),
+                                       autoload=True,
+                                       autoload_with=engine)
+
+        conn = engine.connect()
+        sel = select([func.count("*")], from_obj=[image_properties_table])
+        num_image_properties = conn.execute(sel).scalar()
+        self.assertEqual(orig_num_image_properties + 2, num_image_properties)
+        conn.close()
+
+        # Downgrade to 2 and check that the type properties were moved
+        # back to the main images table
+
+        migration_api.downgrade(options, 2)
+
+        images_table = Table('images', MetaData(), autoload=True,
+                             autoload_with=engine)
+
+        self.assertTrue('type' in images_table.c,
+                        "'type' column not found in images table columns! "
+                        "images table columns: %s"
+                        % images_table.c.keys())
+
+        image_properties_table = Table('image_properties', MetaData(),
+                                       autoload=True,
+                                       autoload_with=engine)
+
+        conn = engine.connect()
+        sel = select([func.count("*")], from_obj=[image_properties_table])
+        last_num_image_properties = conn.execute(sel).scalar()
+
+        self.assertEqual(num_image_properties - 2, last_num_image_properties)
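
For reference, the new setUp() pulls its connection strings from the DEFAULT section of tests/unit/test_migrations.conf and opens one engine per entry. A minimal sketch of what such a file might look like, with illustrative key names, credentials and database names that are not part of this change:

    [DEFAULT]
    sqlite = sqlite:///test_migrations.db
    mysql = mysql://openstack_citest:secret@localhost/test_migrations

Given a hypothetical mysql entry like the one above, _reset_databases() would recreate the schema between tests by shelling out to roughly:

    mysql -uopenstack_citest -psecret -hlocalhost -e"drop database if exists test_migrations; create database test_migrations;"

while SQLite databases are simply unlinked and recreated on the next connection.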