~ubuntu-cloud-archive/ubuntu/precise/nova/trunk


Viewing changes to nova/db/sqlalchemy/api.py

  • Committer: Package Import Robot
  • Author(s): Chuck Short, Adam Gandelman, Chuck Short, Vishvananda Ishaya
  • Date: 2012-09-20 07:45:50 UTC
  • mfrom: (1.1.62)
  • Revision ID: package-import@ubuntu.com-20120920074550-fzmmmzqcntnw1vu7
Tags: 2012.2~rc1-0ubuntu1
[ Adam Gandelman ]
* Ensure /etc/nova/rootwrap.d/ is only writable by root, ensure
  those permissions on /etc/nova/rootwrap.conf as well as
  all individual filter configurations.

[ Chuck Short ]
* Fix lintian warnings
* debian/*.logrotate: compress logfiles when they are rotated. (LP:
  #1049915)
* debian/control: 
  - Suggest ceph-common for nova-volume.
  - Add python-cinderclient as a build depends.

[ Vishvananda Ishaya ]
* Split up vncproxy and xvpvncproxy.

--- nova/db/sqlalchemy/api.py
+++ nova/db/sqlalchemy/api.py
@@ -47,8 +47,6 @@
 from sqlalchemy.sql import func
 
 FLAGS = flags.FLAGS
-flags.DECLARE('reserved_host_disk_mb', 'nova.scheduler.host_manager')
-flags.DECLARE('reserved_host_memory_mb', 'nova.scheduler.host_manager')
 
 LOG = logging.getLogger(__name__)
 
@@ -188,20 +186,21 @@
     return wrapper
 
 
-def model_query(context, *args, **kwargs):
+def model_query(context, model, *args, **kwargs):
     """Query helper that accounts for context's `read_deleted` field.
 
     :param context: context to query under
     :param session: if present, the session to use
     :param read_deleted: if present, overrides context's read_deleted field.
     :param project_only: if present and context is user-type, then restrict
-            query to match the context's project_id.
+            query to match the context's project_id. If set to 'allow_none',
+            restriction includes project_id = None.
     """
     session = kwargs.get('session') or get_session()
     read_deleted = kwargs.get('read_deleted') or context.read_deleted
-    project_only = kwargs.get('project_only')
+    project_only = kwargs.get('project_only', False)
 
-    query = session.query(*args)
+    query = session.query(model, *args)
 
     if read_deleted == 'no':
         query = query.filter_by(deleted=False)
@@ -213,8 +212,12 @@
         raise Exception(
                 _("Unrecognized read_deleted value '%s'") % read_deleted)
 
-    if project_only and is_user_context(context):
-        query = query.filter_by(project_id=context.project_id)
+    if is_user_context(context) and project_only:
+        if project_only == 'allow_none':
+            query = query.filter(or_(model.project_id == context.project_id,
+                                     model.project_id == None))
+        else:
+            query = query.filter_by(project_id=context.project_id)
 
     return query
 
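As an illustration (not part of the diff): a minimal sketch of how callers can use the two project_only modes that model_query now accepts. models.Network is only an example model; any model passed as the new positional argument behaves the same way.

    # Strict scoping: only rows whose project_id matches the context.
    query = model_query(context, models.Network, project_only=True)

    # Relaxed scoping: rows owned by the project plus rows whose
    # project_id is None, via the or_() filter added above.
    query = model_query(context, models.Network, project_only='allow_none')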
@@ -734,6 +737,35 @@
             session.add(model)
 
 
+def _ip_range_splitter(ips, block_size=256):
+    """Yields blocks of IPs no more than block_size elements long."""
+    out = []
+    count = 0
+    for ip in ips:
+        out.append(ip['address'])
+        count += 1
+
+        if count > block_size - 1:
+            yield out
+            out = []
+            count = 0
+
+    if out:
+        yield out
+
+
+@require_context
+def floating_ip_bulk_destroy(context, ips):
+    session = get_session()
+    with session.begin():
+        for ip_block in _ip_range_splitter(ips):
+            model_query(context, models.FloatingIp).\
+                filter(models.FloatingIp.address.in_(ip_block)).\
+                update({'deleted': True,
+                        'deleted_at': timeutils.utcnow()},
+                       synchronize_session='fetch')
+
+
 @require_context
 def floating_ip_create(context, values, session=None):
     if not session:
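For illustration, a rough sketch of what the new _ip_range_splitter generator yields; the input dicts mirror the ip['address'] access in the added code, and the block sizes follow from the default block_size of 256.

    # Hypothetical input: 600 FloatingIp-style rows.
    ips = [{'address': '10.0.%d.%d' % (i // 256, i % 256)} for i in range(600)]
    blocks = list(_ip_range_splitter(ips))
    # Three lists of plain address strings: 256, 256 and 88 entries,
    # which keeps the IN() clause in floating_ip_bulk_destroy bounded.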
@@ -838,8 +870,9 @@
         floating_ip_ref.save(session=session)
 
 
-def _floating_ip_get_all(context):
-    return model_query(context, models.FloatingIp, read_deleted="no")
+def _floating_ip_get_all(context, session=None):
+    return model_query(context, models.FloatingIp, read_deleted="no",
+                       session=session)
 
 
 @require_admin_context
@@ -1388,8 +1421,8 @@
 
     def _get_sec_group_models(session, security_groups):
         models = []
-        default_group = security_group_ensure_default(context,
-                session=session)
+        _existed, default_group = security_group_ensure_default(context,
+            session=session)
         if 'default' in security_groups:
             models.append(default_group)
             # Generate a new list, so we don't modify the original
@@ -1561,6 +1594,11 @@
     query_prefix = regex_filter(query_prefix, models.Instance, filters)
 
     # paginate query
+    if marker is not None:
+        try:
+            marker = instance_get_by_uuid(context, marker, session=session)
+        except exception.InstanceNotFound as e:
+            raise exception.MarkerNotFound(marker)
     query_prefix = paginate_query(query_prefix, models.Instance, limit,
                            [sort_key, 'created_at', 'id'],
                            marker=marker,
@@ -2100,9 +2138,9 @@
 
 
 @require_context
-def network_get(context, network_id, session=None):
+def network_get(context, network_id, session=None, project_only='allow_none'):
     result = model_query(context, models.Network, session=session,
-                         project_only=True).\
+                         project_only=project_only).\
                     filter_by(id=network_id).\
                     first()
 
@@ -2122,24 +2160,17 @@
     return result
 
 
-@require_admin_context
-def network_get_all_by_uuids(context, network_uuids, project_id=None):
-    project_or_none = or_(models.Network.project_id == project_id,
-                          models.Network.project_id == None)
-    result = model_query(context, models.Network, read_deleted="no").\
+@require_context
+def network_get_all_by_uuids(context, network_uuids,
+                             project_only="allow_none"):
+    result = model_query(context, models.Network, read_deleted="no",
+                         project_only=project_only).\
                 filter(models.Network.uuid.in_(network_uuids)).\
-                filter(project_or_none).\
                 all()
 
     if not result:
         raise exception.NoNetworksFound()
 
-    #check if host is set to all of the networks
-    # returned in the result
-    for network in result:
-        if network['host'] is None:
-            raise exception.NetworkHostNotSet(network_id=network['id'])
-
     #check if the result contains all the networks
     #we are looking for
     for network_uuid in network_uuids:
@@ -2149,7 +2180,7 @@
                 found = True
                 break
         if not found:
-            if project_id:
+            if project_only:
                 raise exception.NetworkNotFoundForProject(
                       network_uuid=network_uuid, project_id=context.project_id)
             raise exception.NetworkNotFound(network_id=network_uuid)
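A hedged sketch of the call-site change implied above: network_get_all_by_uuids no longer accepts project_id, so callers pick the scoping behaviour through project_only instead (the UUID below is only a placeholder).

    # Accept networks owned by the project as well as unowned ones
    # (project_id = None); this is the default.
    nets = network_get_all_by_uuids(context, ['example-uuid'],
                                    project_only='allow_none')

    # Strict project scoping; a missing UUID raises
    # NetworkNotFoundForProject because project_only is truthy.
    nets = network_get_all_by_uuids(context, ['example-uuid'],
                                    project_only=True)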
@@ -2903,7 +2934,7 @@
     with session.begin():
         volume_ref.save(session=session)
 
-    return volume_ref
+    return volume_get(context, values['id'], session=session)
 
 
 @require_admin_context
@@ -3499,11 +3530,17 @@
 
 
 def security_group_ensure_default(context, session=None):
-    """Ensure default security group exists for a project_id."""
+    """Ensure default security group exists for a project_id.
+
+    Returns a tuple with the first element being a bool indicating
+    if the default security group previously existed. Second
+    element is the dict used to create the default security group.
+    """
     try:
         default_group = security_group_get_by_name(context,
                 context.project_id, 'default',
                 columns_to_join=[], session=session)
+        return (True, default_group)
     except exception.NotFound:
         values = {'name': 'default',
                   'description': 'default',
@@ -3511,7 +3548,7 @@
                   'project_id': context.project_id}
         default_group = security_group_create(context, values,
                 session=session)
-    return default_group
+        return (False, default_group)
 
 
 @require_context
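For illustration, the new return contract of security_group_ensure_default, matching the _existed, default_group unpacking added to _get_sec_group_models above.

    # Callers now unpack a (bool, group) tuple.
    existed, default_group = security_group_ensure_default(context,
                                                           session=session)
    # existed is True when the project's 'default' group was already present,
    # False when this call had to create it; default_group is the group row
    # in both cases.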
@@ -4822,16 +4859,11 @@
                                      models.Aggregate.name,
                                      values['name'],
                                      session=session,
-                                     read_deleted='yes').first()
+                                     read_deleted='no').first()
     if not aggregate:
         aggregate = models.Aggregate()
         aggregate.update(values)
         aggregate.save(session=session)
-    elif aggregate.deleted:
-        values['deleted'] = False
-        values['deleted_at'] = None
-        aggregate.update(values)
-        aggregate.save(session=session)
     else:
         raise exception.AggregateNameExists(aggregate_name=values['name'])
     if metadata:
@@ -4916,6 +4948,14 @@
     else:
         raise exception.AggregateNotFound(aggregate_id=aggregate_id)
 
+    #Delete Metadata
+    rows = model_query(context,
+                       models.AggregateMetadata).\
+                       filter_by(aggregate_id=aggregate_id).\
+                       update({'deleted': True,
+                      'deleted_at': timeutils.utcnow(),
+                      'updated_at': literal_column('updated_at')})
+
 
 @require_admin_context
 def aggregate_get_all(context):