# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
20
"""Implementation of SQLAlchemy backend."""
27
from nova import block_device
29
from nova import exception
30
from nova import flags
31
from nova import utils
32
from nova import log as logging
33
from nova.compute import aggregate_states
34
from nova.compute import vm_states
35
from nova.db.sqlalchemy import models
36
from nova.db.sqlalchemy.session import get_session
37
from sqlalchemy import and_
38
from sqlalchemy import or_
39
from sqlalchemy.exc import IntegrityError
40
from sqlalchemy.orm import joinedload
41
from sqlalchemy.orm import joinedload_all
42
from sqlalchemy.sql import func
43
from sqlalchemy.sql.expression import asc
44
from sqlalchemy.sql.expression import desc
45
from sqlalchemy.sql.expression import literal_column
48
flags.DECLARE('reserved_host_disk_mb', 'nova.scheduler.host_manager')
49
flags.DECLARE('reserved_host_memory_mb', 'nova.scheduler.host_manager')
51
LOG = logging.getLogger(__name__)
54
def is_admin_context(context):
55
"""Indicates if the request context is an administrator."""
57
warnings.warn(_('Use of empty request context is deprecated'),
59
raise Exception('die')
60
return context.is_admin
63
def is_user_context(context):
64
"""Indicates if the request context is a normal user."""
69
if not context.user_id or not context.project_id:
74
def authorize_project_context(context, project_id):
    """Ensures a request has permission to access the given project."""
    if not is_user_context(context):
        # Non-user (admin) contexts are not restricted here.
        return
    if not context.project_id or context.project_id != project_id:
        raise exception.NotAuthorized()
83
def authorize_user_context(context, user_id):
    """Ensures a request has permission to access the given user."""
    if not is_user_context(context):
        # Non-user (admin) contexts are not restricted here.
        return
    if not context.user_id or context.user_id != user_id:
        raise exception.NotAuthorized()
92
def require_admin_context(f):
93
"""Decorator to require admin request context.
95
The first argument to the wrapped function must be the context.
99
def wrapper(*args, **kwargs):
100
if not is_admin_context(args[0]):
101
raise exception.AdminRequired()
102
return f(*args, **kwargs)
106
def require_context(f):
107
"""Decorator to require *any* user or admin context.
109
This does no authorization for user or project access matching, see
110
:py:func:`authorize_project_context` and
111
:py:func:`authorize_user_context`.
113
The first argument to the wrapped function must be the context.
117
def wrapper(*args, **kwargs):
118
if not is_admin_context(args[0]) and not is_user_context(args[0]):
119
raise exception.NotAuthorized()
120
return f(*args, **kwargs)
124
def require_instance_exists(f):
125
"""Decorator to require the specified instance to exist.
127
Requires the wrapped function to use context and instance_id as
128
their first two arguments.
131
def wrapper(context, instance_id, *args, **kwargs):
132
db.instance_get(context, instance_id)
133
return f(context, instance_id, *args, **kwargs)
134
wrapper.__name__ = f.__name__
138
def require_volume_exists(f):
139
"""Decorator to require the specified volume to exist.
141
Requires the wrapped function to use context and volume_id as
142
their first two arguments.
145
def wrapper(context, volume_id, *args, **kwargs):
146
db.volume_get(context, volume_id)
147
return f(context, volume_id, *args, **kwargs)
148
wrapper.__name__ = f.__name__
152
def require_aggregate_exists(f):
153
"""Decorator to require the specified aggregate to exist.
155
Requires the wrapped function to use context and aggregate_id as
156
their first two arguments.
160
def wrapper(context, aggregate_id, *args, **kwargs):
161
db.aggregate_get(context, aggregate_id)
162
return f(context, aggregate_id, *args, **kwargs)
166
def model_query(context, *args, **kwargs):
167
"""Query helper that accounts for context's `read_deleted` field.
169
:param context: context to query under
170
:param session: if present, the session to use
171
:param read_deleted: if present, overrides context's read_deleted field.
172
:param project_only: if present and context is user-type, then restrict
173
query to match the context's project_id.
175
session = kwargs.get('session') or get_session()
176
read_deleted = kwargs.get('read_deleted') or context.read_deleted
177
project_only = kwargs.get('project_only')
179
query = session.query(*args)
181
if read_deleted == 'no':
182
query = query.filter_by(deleted=False)
183
elif read_deleted == 'yes':
184
pass # omit the filter to include deleted and active
185
elif read_deleted == 'only':
186
query = query.filter_by(deleted=True)
189
_("Unrecognized read_deleted value '%s'") % read_deleted)
191
if project_only and is_user_context(context):
192
query = query.filter_by(project_id=context.project_id)
197
def exact_filter(query, model, filters, legal_keys):
198
"""Applies exact match filtering to a query.
200
Returns the updated query. Modifies filters argument to remove
203
:param query: query to apply filters to
204
:param model: model object the query applies to, for IN-style
206
:param filters: dictionary of filters; values that are lists,
207
tuples, sets, or frozensets cause an 'IN' test to
208
be performed, while exact matching ('==' operator)
209
is used for other values
210
:param legal_keys: list of keys to apply exact filtering to
215
# Walk through all the keys
216
for key in legal_keys:
217
# Skip ones we're not filtering on
218
if key not in filters:
221
# OK, filtering on this key; what value do we search for?
222
value = filters.pop(key)
224
if isinstance(value, (list, tuple, set, frozenset)):
225
# Looking for values in a list; apply to query directly
226
column_attr = getattr(model, key)
227
query = query.filter(column_attr.in_(value))
229
# OK, simple exact match; save for later
230
filter_dict[key] = value
232
# Apply simple exact matches
234
query = query.filter_by(**filter_dict)
242
@require_admin_context
def service_destroy(context, service_id):
    """Delete the service with the given id, along with any compute-node
    records attached to a compute service."""
    session = get_session()
    with session.begin():
        service = service_get(context, service_id, session=session)
        service.delete(session=session)

        # Compute services own ComputeNode rows; remove those as well.
        if service.topic == 'compute' and service.compute_node:
            for node in service.compute_node:
                node.delete(session=session)
254
@require_admin_context
255
def service_get(context, service_id, session=None):
256
result = model_query(context, models.Service, session=session).\
257
options(joinedload('compute_node')).\
258
filter_by(id=service_id).\
261
raise exception.ServiceNotFound(service_id=service_id)
266
@require_admin_context
267
def service_get_all(context, disabled=None):
268
query = model_query(context, models.Service)
270
if disabled is not None:
271
query = query.filter_by(disabled=disabled)
276
@require_admin_context
277
def service_get_all_by_topic(context, topic):
278
return model_query(context, models.Service, read_deleted="no").\
279
filter_by(disabled=False).\
280
filter_by(topic=topic).\
284
@require_admin_context
285
def service_get_by_host_and_topic(context, host, topic):
286
return model_query(context, models.Service, read_deleted="no").\
287
filter_by(disabled=False).\
288
filter_by(host=host).\
289
filter_by(topic=topic).\
293
@require_admin_context
294
def service_get_all_by_host(context, host):
295
return model_query(context, models.Service, read_deleted="no").\
296
filter_by(host=host).\
300
@require_admin_context
301
def service_get_all_compute_by_host(context, host):
302
result = model_query(context, models.Service, read_deleted="no").\
303
options(joinedload('compute_node')).\
304
filter_by(host=host).\
305
filter_by(topic="compute").\
309
raise exception.ComputeHostNotFound(host=host)
314
@require_admin_context
315
def _service_get_all_topic_subquery(context, session, topic, subq, label):
316
sort_value = getattr(subq.c, label)
317
return model_query(context, models.Service,
318
func.coalesce(sort_value, 0),
319
session=session, read_deleted="no").\
320
filter_by(topic=topic).\
321
filter_by(disabled=False).\
322
outerjoin((subq, models.Service.host == subq.c.host)).\
323
order_by(sort_value).\
327
@require_admin_context
328
def service_get_all_compute_sorted(context):
329
session = get_session()
330
with session.begin():
331
# NOTE(vish): The intended query is below
332
# SELECT services.*, COALESCE(inst_cores.instance_cores,
334
# FROM services LEFT OUTER JOIN
335
# (SELECT host, SUM(instances.vcpus) AS instance_cores
336
# FROM instances GROUP BY host) AS inst_cores
337
# ON services.host = inst_cores.host
339
label = 'instance_cores'
340
subq = model_query(context, models.Instance.host,
341
func.sum(models.Instance.vcpus).label(label),
342
session=session, read_deleted="no").\
343
group_by(models.Instance.host).\
345
return _service_get_all_topic_subquery(context,
352
@require_admin_context
353
def service_get_all_volume_sorted(context):
354
session = get_session()
355
with session.begin():
357
label = 'volume_gigabytes'
358
subq = model_query(context, models.Volume.host,
359
func.sum(models.Volume.size).label(label),
360
session=session, read_deleted="no").\
361
group_by(models.Volume.host).\
363
return _service_get_all_topic_subquery(context,
370
@require_admin_context
371
def service_get_by_args(context, host, binary):
372
result = model_query(context, models.Service).\
373
filter_by(host=host).\
374
filter_by(binary=binary).\
378
raise exception.HostBinaryNotFound(host=host, binary=binary)
383
@require_admin_context
384
def service_create(context, values):
385
service_ref = models.Service()
386
service_ref.update(values)
387
if not FLAGS.enable_new_services:
388
service_ref.disabled = True
393
@require_admin_context
def service_update(context, service_id, values):
    """Apply the column values in *values* to an existing service row,
    inside a single transaction."""
    session = get_session()
    with session.begin():
        service = service_get(context, service_id, session=session)
        service.update(values)
        service.save(session=session)
405
@require_admin_context
406
def compute_node_get(context, compute_id, session=None):
407
result = model_query(context, models.ComputeNode, session=session).\
408
filter_by(id=compute_id).\
412
raise exception.ComputeHostNotFound(host=compute_id)
417
@require_admin_context
418
def compute_node_get_all(context, session=None):
419
return model_query(context, models.ComputeNode, session=session).\
420
options(joinedload('service')).\
424
def _get_host_utilization(context, host, ram_mb, disk_gb):
425
"""Compute the current utilization of a given host."""
426
instances = instance_get_all_by_host(context, host)
428
free_ram_mb = ram_mb - FLAGS.reserved_host_memory_mb
429
free_disk_gb = disk_gb - (FLAGS.reserved_host_disk_mb * 1024)
432
for instance in instances:
433
free_ram_mb -= instance.memory_mb
434
free_disk_gb -= instance.root_gb
435
free_disk_gb -= instance.ephemeral_gb
436
if instance.vm_state in [vm_states.BUILDING, vm_states.REBUILDING,
437
vm_states.MIGRATING, vm_states.RESIZING]:
439
return dict(free_ram_mb=free_ram_mb,
440
free_disk_gb=free_disk_gb,
441
current_workload=work,
445
def _adjust_compute_node_values_for_utilization(context, values, session):
    """Merge current host-utilization figures into *values* (in place).

    Looks up the service row named by values['service_id'] to find the
    host, then folds the result of _get_host_utilization() into the dict.
    """
    service = service_get(context, values['service_id'], session=session)
    utilization = _get_host_utilization(context,
                                        service['host'],
                                        values['memory_mb'],
                                        values['local_gb'])
    values.update(utilization)
453
@require_admin_context
454
def compute_node_create(context, values, session=None):
455
"""Creates a new ComputeNode and populates the capacity fields
456
with the most recent data."""
458
session = get_session()
460
_adjust_compute_node_values_for_utilization(context, values, session)
461
with session.begin(subtransactions=True):
462
compute_node_ref = models.ComputeNode()
463
session.add(compute_node_ref)
464
compute_node_ref.update(values)
465
return compute_node_ref
468
@require_admin_context
469
def compute_node_update(context, compute_id, values, auto_adjust):
470
"""Creates a new ComputeNode and populates the capacity fields
471
with the most recent data."""
472
session = get_session()
474
_adjust_compute_node_values_for_utilization(context, values, session)
475
with session.begin(subtransactions=True):
476
compute_ref = compute_node_get(context, compute_id, session=session)
477
compute_ref.update(values)
478
compute_ref.save(session=session)
481
def compute_node_get_by_host(context, host):
482
"""Get all capacity entries for the given host."""
483
session = get_session()
484
with session.begin():
485
node = session.query(models.ComputeNode).\
486
options(joinedload('service')).\
487
filter(models.Service.host == host).\
488
filter_by(deleted=False)
492
def compute_node_utilization_update(context, host, free_ram_mb_delta=0,
493
free_disk_gb_delta=0, work_delta=0, vm_delta=0):
494
"""Update a specific ComputeNode entry by a series of deltas.
495
Do this as a single atomic action and lock the row for the
496
duration of the operation. Requires that ComputeNode record exist."""
497
session = get_session()
499
with session.begin(subtransactions=True):
500
compute_node = session.query(models.ComputeNode).\
501
options(joinedload('service')).\
502
filter(models.Service.host == host).\
503
filter_by(deleted=False).\
504
with_lockmode('update').\
506
if compute_node is None:
507
raise exception.NotFound(_("No ComputeNode for %(host)s") %
510
# This table thingy is how we get atomic UPDATE x = x + 1
512
table = models.ComputeNode.__table__
513
if free_ram_mb_delta != 0:
514
compute_node.free_ram_mb = table.c.free_ram_mb + free_ram_mb_delta
515
if free_disk_gb_delta != 0:
516
compute_node.free_disk_gb = (table.c.free_disk_gb +
519
compute_node.current_workload = (table.c.current_workload +
522
compute_node.running_vms = table.c.running_vms + vm_delta
526
def compute_node_utilization_set(context, host, free_ram_mb=None,
527
free_disk_gb=None, work=None, vms=None):
528
"""Like compute_node_utilization_update() modify a specific host
529
entry. But this function will set the metrics absolutely
530
(vs. a delta update).
532
session = get_session()
534
with session.begin(subtransactions=True):
535
compute_node = session.query(models.ComputeNode).\
536
options(joinedload('service')).\
537
filter(models.Service.host == host).\
538
filter_by(deleted=False).\
539
with_lockmode('update').\
541
if compute_node is None:
542
raise exception.NotFound(_("No ComputeNode for %(host)s") %
545
if free_ram_mb != None:
546
compute_node.free_ram_mb = free_ram_mb
547
if free_disk_gb != None:
548
compute_node.free_disk_gb = free_disk_gb
550
compute_node.current_workload = work
552
compute_node.running_vms = vms
560
@require_admin_context
561
def certificate_get(context, certificate_id, session=None):
562
result = model_query(context, models.Certificate, session=session).\
563
filter_by(id=certificate_id).\
567
raise exception.CertificateNotFound(certificate_id=certificate_id)
572
@require_admin_context
def certificate_create(context, values):
    """Create and persist a Certificate row from a dict of column values."""
    cert = models.Certificate()
    for key in values:
        cert[key] = values[key]
    cert.save()
    return cert
581
@require_admin_context
582
def certificate_get_all_by_project(context, project_id):
583
return model_query(context, models.Certificate, read_deleted="no").\
584
filter_by(project_id=project_id).\
588
@require_admin_context
589
def certificate_get_all_by_user(context, user_id):
590
return model_query(context, models.Certificate, read_deleted="no").\
591
filter_by(user_id=user_id).\
595
@require_admin_context
596
def certificate_get_all_by_user_and_project(context, user_id, project_id):
597
return model_query(context, models.Certificate, read_deleted="no").\
598
filter_by(user_id=user_id).\
599
filter_by(project_id=project_id).\
607
def floating_ip_get(context, id):
608
result = model_query(context, models.FloatingIp, project_only=True).\
613
raise exception.FloatingIpNotFound(id=id)
619
def floating_ip_get_pools(context):
620
session = get_session()
622
for result in session.query(models.FloatingIp.pool).distinct():
623
pools.append({'name': result[0]})
628
def floating_ip_allocate_address(context, project_id, pool):
629
authorize_project_context(context, project_id)
630
session = get_session()
631
with session.begin():
632
floating_ip_ref = model_query(context, models.FloatingIp,
633
session=session, read_deleted="no").\
634
filter_by(fixed_ip_id=None).\
635
filter_by(project_id=None).\
636
filter_by(pool=pool).\
637
with_lockmode('update').\
639
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
640
# then this has concurrency issues
641
if not floating_ip_ref:
642
raise exception.NoMoreFloatingIps()
643
floating_ip_ref['project_id'] = project_id
644
session.add(floating_ip_ref)
645
return floating_ip_ref['address']
649
def floating_ip_create(context, values):
    """Create a floating IP row from *values*; return its address."""
    fip = models.FloatingIp()
    fip.update(values)
    fip.save()
    return fip['address']
657
def floating_ip_count_by_project(context, project_id):
658
authorize_project_context(context, project_id)
659
# TODO(tr3buchet): why leave auto_assigned floating IPs out?
660
return model_query(context, models.FloatingIp, read_deleted="no").\
661
filter_by(project_id=project_id).\
662
filter_by(auto_assigned=False).\
667
def floating_ip_fixed_ip_associate(context, floating_address,
668
fixed_address, host):
669
session = get_session()
670
with session.begin():
671
floating_ip_ref = floating_ip_get_by_address(context,
674
fixed_ip_ref = fixed_ip_get_by_address(context,
677
floating_ip_ref.fixed_ip_id = fixed_ip_ref["id"]
678
floating_ip_ref.host = host
679
floating_ip_ref.save(session=session)
683
def floating_ip_deallocate(context, address):
684
session = get_session()
685
with session.begin():
686
floating_ip_ref = floating_ip_get_by_address(context,
689
floating_ip_ref['project_id'] = None
690
floating_ip_ref['host'] = None
691
floating_ip_ref['auto_assigned'] = False
692
floating_ip_ref.save(session=session)
696
def floating_ip_destroy(context, address):
697
session = get_session()
698
with session.begin():
699
floating_ip_ref = floating_ip_get_by_address(context,
702
floating_ip_ref.delete(session=session)
706
def floating_ip_disassociate(context, address):
707
session = get_session()
708
with session.begin():
709
floating_ip_ref = floating_ip_get_by_address(context,
712
fixed_ip_ref = fixed_ip_get(context,
713
floating_ip_ref['fixed_ip_id'])
715
fixed_ip_address = fixed_ip_ref['address']
717
fixed_ip_address = None
718
floating_ip_ref.fixed_ip_id = None
719
floating_ip_ref.host = None
720
floating_ip_ref.save(session=session)
721
return fixed_ip_address
725
def floating_ip_set_auto_assigned(context, address):
726
session = get_session()
727
with session.begin():
728
floating_ip_ref = floating_ip_get_by_address(context,
731
floating_ip_ref.auto_assigned = True
732
floating_ip_ref.save(session=session)
735
def _floating_ip_get_all(context):
    """Base query over non-deleted floating IPs."""
    query = model_query(context, models.FloatingIp, read_deleted="no")
    return query
739
@require_admin_context
def floating_ip_get_all(context):
    """Return all non-deleted floating IPs; raise if none are defined."""
    rows = _floating_ip_get_all(context).all()
    if not rows:
        raise exception.NoFloatingIpsDefined()
    return rows
747
@require_admin_context
748
def floating_ip_get_all_by_host(context, host):
749
floating_ip_refs = _floating_ip_get_all(context).\
750
filter_by(host=host).\
752
if not floating_ip_refs:
753
raise exception.FloatingIpNotFoundForHost(host=host)
754
return floating_ip_refs
758
def floating_ip_get_all_by_project(context, project_id):
759
authorize_project_context(context, project_id)
760
# TODO(tr3buchet): why do we not want auto_assigned floating IPs here?
761
return _floating_ip_get_all(context).\
762
filter_by(project_id=project_id).\
763
filter_by(auto_assigned=False).\
768
def floating_ip_get_by_address(context, address, session=None):
769
result = model_query(context, models.FloatingIp, session=session).\
770
filter_by(address=address).\
774
raise exception.FloatingIpNotFoundForAddress(address=address)
776
# If the floating IP has a project ID set, check to make sure
777
# the non-admin user has access.
778
if result.project_id and is_user_context(context):
779
authorize_project_context(context, result.project_id)
785
def floating_ip_get_by_fixed_address(context, fixed_address, session=None):
787
session = get_session()
789
fixed_ip = fixed_ip_get_by_address(context, fixed_address, session)
790
fixed_ip_id = fixed_ip['id']
792
return model_query(context, models.FloatingIp, session=session).\
793
filter_by(fixed_ip_id=fixed_ip_id).\
796
# NOTE(tr3buchet) please don't invent an exception here, empty list is fine
800
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id, session=None):
802
session = get_session()
804
return model_query(context, models.FloatingIp, session=session).\
805
filter_by(fixed_ip_id=fixed_ip_id).\
810
def floating_ip_update(context, address, values):
    """Update the floating IP identified by *address* with *values*,
    inside a single transaction."""
    session = get_session()
    with session.begin():
        fip = floating_ip_get_by_address(context, address, session)
        for key in values:
            fip[key] = values[key]
        fip.save(session=session)
820
def _dnsdomain_get(context, session, fqdomain):
821
return model_query(context, models.DNSDomain,
822
session=session, read_deleted="no").\
823
filter_by(domain=fqdomain).\
824
with_lockmode('update').\
829
def dnsdomain_get(context, fqdomain):
    """Fetch the DNSDomain record for *fqdomain* within a transaction."""
    session = get_session()
    with session.begin():
        domain_ref = _dnsdomain_get(context, session, fqdomain)
        return domain_ref
835
@require_admin_context
836
def _dnsdomain_get_or_create(context, session, fqdomain):
837
domain_ref = _dnsdomain_get(context, session, fqdomain)
839
dns_ref = models.DNSDomain()
840
dns_ref.update({'domain': fqdomain,
841
'availability_zone': None,
848
@require_admin_context
def dnsdomain_register_for_zone(context, fqdomain, zone):
    """Register *fqdomain* as a 'private' domain bound to availability
    zone *zone*, creating the row if necessary."""
    session = get_session()
    with session.begin():
        domain = _dnsdomain_get_or_create(context, session, fqdomain)
        domain.scope = 'private'
        domain.availability_zone = zone
        domain.save(session=session)
858
@require_admin_context
def dnsdomain_register_for_project(context, fqdomain, project):
    """Register *fqdomain* as a 'public' domain owned by *project*,
    creating the row if necessary."""
    session = get_session()
    with session.begin():
        domain = _dnsdomain_get_or_create(context, session, fqdomain)
        domain.scope = 'public'
        domain.project_id = project
        domain.save(session=session)
868
@require_admin_context
869
def dnsdomain_unregister(context, fqdomain):
870
session = get_session()
871
with session.begin():
872
session.query(models.DNSDomain).\
873
filter_by(domain=fqdomain).\
878
def dnsdomain_list(context):
879
session = get_session()
880
records = model_query(context, models.DNSDomain,
881
session=session, read_deleted="no").\
882
with_lockmode('update').all()
884
for record in records:
885
domains.append(record.domain)
893
@require_admin_context
894
def fixed_ip_associate(context, address, instance_id, network_id=None,
896
"""Keyword arguments:
897
reserved -- should be a boolean value(True or False), exact value will be
898
used to filter on the fixed ip address
900
session = get_session()
901
with session.begin():
902
network_or_none = or_(models.FixedIp.network_id == network_id,
903
models.FixedIp.network_id == None)
904
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
906
filter(network_or_none).\
907
filter_by(reserved=reserved).\
908
filter_by(address=address).\
909
with_lockmode('update').\
911
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
912
# then this has concurrency issues
913
if fixed_ip_ref is None:
914
raise exception.FixedIpNotFoundForNetwork(address=address,
915
network_id=network_id)
916
if fixed_ip_ref.instance_id:
917
raise exception.FixedIpAlreadyInUse(address=address)
919
if not fixed_ip_ref.network_id:
920
fixed_ip_ref.network_id = network_id
921
fixed_ip_ref.instance_id = instance_id
922
session.add(fixed_ip_ref)
923
return fixed_ip_ref['address']
926
@require_admin_context
927
def fixed_ip_associate_pool(context, network_id, instance_id=None, host=None):
928
session = get_session()
929
with session.begin():
930
network_or_none = or_(models.FixedIp.network_id == network_id,
931
models.FixedIp.network_id == None)
932
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
934
filter(network_or_none).\
935
filter_by(reserved=False).\
936
filter_by(instance_id=None).\
937
filter_by(host=None).\
938
with_lockmode('update').\
940
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
941
# then this has concurrency issues
943
raise exception.NoMoreFixedIps()
945
if fixed_ip_ref['network_id'] is None:
946
fixed_ip_ref['network'] = network_id
949
fixed_ip_ref['instance_id'] = instance_id
952
fixed_ip_ref['host'] = host
953
session.add(fixed_ip_ref)
954
return fixed_ip_ref['address']
958
def fixed_ip_create(context, values):
959
fixed_ip_ref = models.FixedIp()
960
fixed_ip_ref.update(values)
962
return fixed_ip_ref['address']
966
def fixed_ip_bulk_create(context, ips):
967
session = get_session()
968
with session.begin():
970
model = models.FixedIp()
976
def fixed_ip_disassociate(context, address):
977
session = get_session()
978
with session.begin():
979
fixed_ip_ref = fixed_ip_get_by_address(context,
982
fixed_ip_ref['instance_id'] = None
983
fixed_ip_ref.save(session=session)
986
@require_admin_context
987
def fixed_ip_disassociate_all_by_timeout(context, host, time):
988
session = get_session()
989
# NOTE(vish): only update fixed ips that "belong" to this
990
# host; i.e. the network host or the instance
991
# host matches. Two queries necessary because
992
# join with update doesn't work.
993
host_filter = or_(and_(models.Instance.host == host,
994
models.Network.multi_host == True),
995
models.Network.host == host)
996
result = session.query(models.FixedIp.id).\
997
filter(models.FixedIp.deleted == False).\
998
filter(models.FixedIp.allocated == False).\
999
filter(models.FixedIp.updated_at < time).\
1000
join((models.Network,
1001
models.Network.id == models.FixedIp.network_id)).\
1002
join((models.Instance,
1003
models.Instance.id == models.FixedIp.instance_id)).\
1004
filter(host_filter).\
1006
fixed_ip_ids = [fip[0] for fip in result]
1007
if not fixed_ip_ids:
1009
result = model_query(context, models.FixedIp, session=session).\
1010
filter(models.FixedIp.id.in_(fixed_ip_ids)).\
1011
update({'instance_id': None,
1013
'updated_at': utils.utcnow()},
1014
synchronize_session='fetch')
1019
def fixed_ip_get(context, id, session=None):
1020
result = model_query(context, models.FixedIp, session=session).\
1024
raise exception.FixedIpNotFound(id=id)
1026
# FIXME(sirp): shouldn't we just use project_only here to restrict the
1028
if is_user_context(context) and result['instance_id'] is not None:
1029
instance = instance_get(context, result['instance_id'], session)
1030
authorize_project_context(context, instance.project_id)
1035
@require_admin_context
1036
def fixed_ip_get_all(context, session=None):
1037
result = model_query(context, models.FixedIp, session=session,
1038
read_deleted="yes").\
1041
raise exception.NoFixedIpsDefined()
1047
def fixed_ip_get_by_address(context, address, session=None):
1048
result = model_query(context, models.FixedIp, session=session,
1049
read_deleted="yes").\
1050
filter_by(address=address).\
1053
raise exception.FixedIpNotFoundForAddress(address=address)
1055
# NOTE(sirp): shouldn't we just use project_only here to restrict the
1057
if is_user_context(context) and result['instance_id'] is not None:
1058
instance = instance_get(context, result['instance_id'], session)
1059
authorize_project_context(context, instance.project_id)
1065
def fixed_ip_get_by_instance(context, instance_id):
1066
result = model_query(context, models.FixedIp, read_deleted="no").\
1067
filter_by(instance_id=instance_id).\
1071
raise exception.FixedIpNotFoundForInstance(instance_id=instance_id)
1077
def fixed_ip_get_by_network_host(context, network_id, host):
1078
result = model_query(context, models.FixedIp, read_deleted="no").\
1079
filter_by(network_id=network_id).\
1080
filter_by(host=host).\
1084
raise exception.FixedIpNotFoundForNetworkHost(network_id=network_id,
1090
def fixed_ips_by_virtual_interface(context, vif_id):
1091
result = model_query(context, models.FixedIp, read_deleted="no").\
1092
filter_by(virtual_interface_id=vif_id).\
1098
@require_admin_context
1099
def fixed_ip_get_network(context, address):
1100
fixed_ip_ref = fixed_ip_get_by_address(context, address)
1101
return fixed_ip_ref.network
1105
def fixed_ip_update(context, address, values):
1106
session = get_session()
1107
with session.begin():
1108
fixed_ip_ref = fixed_ip_get_by_address(context,
1111
fixed_ip_ref.update(values)
1112
fixed_ip_ref.save(session=session)
1119
def virtual_interface_create(context, values):
1120
"""Create a new virtual interface record in the database.
1122
:param values: = dict containing column values
1125
vif_ref = models.VirtualInterface()
1126
vif_ref.update(values)
1128
except IntegrityError:
1129
raise exception.VirtualInterfaceCreateException()
1135
def _virtual_interface_query(context, session=None):
1136
return model_query(context, models.VirtualInterface, session=session,
1141
def virtual_interface_get(context, vif_id, session=None):
1142
"""Gets a virtual interface from the table.
1144
:param vif_id: = id of the virtual interface
1146
vif_ref = _virtual_interface_query(context, session=session).\
1147
filter_by(id=vif_id).\
1153
def virtual_interface_get_by_address(context, address):
1154
"""Gets a virtual interface from the table.
1156
:param address: = the address of the interface you're looking to get
1158
vif_ref = _virtual_interface_query(context).\
1159
filter_by(address=address).\
1165
def virtual_interface_get_by_uuid(context, vif_uuid):
1166
"""Gets a virtual interface from the table.
1168
:param vif_uuid: the uuid of the interface you're looking to get
1170
vif_ref = _virtual_interface_query(context).\
1171
filter_by(uuid=vif_uuid).\
1177
@require_instance_exists
1178
def virtual_interface_get_by_instance(context, instance_id):
1179
"""Gets all virtual interfaces for instance.
1181
:param instance_id: = id of the instance to retrieve vifs for
1183
vif_refs = _virtual_interface_query(context).\
1184
filter_by(instance_id=instance_id).\
1190
def virtual_interface_get_by_instance_and_network(context, instance_id,
1192
"""Gets virtual interface for instance that's associated with network."""
1193
vif_ref = _virtual_interface_query(context).\
1194
filter_by(instance_id=instance_id).\
1195
filter_by(network_id=network_id).\
1201
def virtual_interface_delete(context, vif_id):
    """Delete virtual interface record from the database.

    :param vif_id: = id of vif to delete
    """
    session = get_session()
    vif_ref = virtual_interface_get(context, vif_id, session)
    with session.begin():
        session.delete(vif_ref)
1213
def virtual_interface_delete_by_instance(context, instance_id):
    """Delete virtual interface records that are associated
    with the instance given by instance_id.

    :param instance_id: = id of instance
    """
    vif_refs = virtual_interface_get_by_instance(context, instance_id)
    for vif_ref in vif_refs:
        virtual_interface_delete(context, vif_ref['id'])
1225
def virtual_interface_get_all(context):
    """Get all virtual interface records."""
    vif_refs = _virtual_interface_query(context).all()
    return vif_refs
1234
def _metadata_refs(metadata_dict, meta_class):
    """Convert a plain {key: value} dict into a list of metadata model rows.

    :param metadata_dict: dict of metadata (may be None or empty)
    :param meta_class: model class to instantiate for each pair
    :returns: list of meta_class instances with 'key'/'value' set
    """
    metadata_refs = []
    if metadata_dict:
        for k, v in metadata_dict.iteritems():
            metadata_ref = meta_class()
            metadata_ref['key'] = k
            metadata_ref['value'] = v
            metadata_refs.append(metadata_ref)
    return metadata_refs
1246
def instance_create(context, values):
    """Create a new Instance record in the database.

    context - request context object
    values - dict containing column values.
    """
    values = values.copy()
    # expand the plain metadata dict into InstanceMetadata rows
    values['metadata'] = _metadata_refs(values.get('metadata'),
                                        models.InstanceMetadata)

    instance_ref = models.Instance()
    if not values.get('uuid'):
        values['uuid'] = str(utils.gen_uuid())
    instance_ref.update(values)

    session = get_session()
    with session.begin():
        instance_ref.save(session=session)

    # and create the info_cache table entry for instance
    instance_info_cache_create(context, {'instance_id': instance_ref['uuid']})

    return instance_ref
1270
@require_admin_context
def instance_data_get_for_project(context, project_id):
    """Return (instance count, total vcpus, total memory_mb) for a project.

    None aggregates (no rows) are normalized to 0.
    """
    result = model_query(context,
                         func.count(models.Instance.id),
                         func.sum(models.Instance.vcpus),
                         func.sum(models.Instance.memory_mb),
                         read_deleted="no").\
                     filter_by(project_id=project_id).\
                     first()
    # NOTE(vish): convert None to 0
    return (result[0] or 0, result[1] or 0, result[2] or 0)
1284
def instance_destroy(context, instance_id):
    """Soft-delete an instance and its dependent rows.

    Marks the instance, its security-group associations, metadata and
    block device mappings as deleted, then removes the info cache entry.

    :param instance_id: instance id or uuid
    :returns: the (now deleted) instance model object
    """
    session = get_session()
    with session.begin():
        if utils.is_uuid_like(instance_id):
            instance_ref = instance_get_by_uuid(context, instance_id,
                                                session=session)
            instance_id = instance_ref['id']
        else:
            instance_ref = instance_get(context, instance_id,
                                        session=session)
        # NOTE: 'updated_at' is pinned to its current value so the soft
        # delete does not look like a content update
        session.query(models.Instance).\
                filter_by(id=instance_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
        session.query(models.SecurityGroupInstanceAssociation).\
                filter_by(instance_id=instance_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
        session.query(models.InstanceMetadata).\
                filter_by(instance_id=instance_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
        session.query(models.BlockDeviceMapping).\
                filter_by(instance_id=instance_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})

    instance_info_cache_delete(context, instance_ref['uuid'],
                               session=session)
    return instance_ref
1321
def instance_get_by_uuid(context, uuid, session=None):
    """Get an instance by uuid; raises InstanceNotFound if missing."""
    result = _build_instance_get(context, session=session).\
                filter_by(uuid=uuid).\
                first()

    if not result:
        raise exception.InstanceNotFound(instance_id=uuid)

    return result
1333
def instance_get(context, instance_id, session=None):
    """Get an instance by integer id; raises InstanceNotFound if missing."""
    result = _build_instance_get(context, session=session).\
                filter_by(id=instance_id).\
                first()

    if not result:
        raise exception.InstanceNotFound(instance_id=instance_id)

    return result
1345
def _build_instance_get(context, session=None):
    """Build the common instance query with related rows eagerly loaded.

    Used by instance_get and instance_get_by_uuid; applies project
    scoping for non-admin contexts via project_only=True.
    """
    return model_query(context, models.Instance, session=session,
                       project_only=True).\
            options(joinedload_all('security_groups.rules')).\
            options(joinedload('info_cache')).\
            options(joinedload('volumes')).\
            options(joinedload('metadata')).\
            options(joinedload('instance_type'))
1355
@require_admin_context
def instance_get_all(context):
    """Return all instances with common relations eagerly loaded."""
    return model_query(context, models.Instance).\
            options(joinedload('info_cache')).\
            options(joinedload('security_groups')).\
            options(joinedload('metadata')).\
            options(joinedload('instance_type')).\
            all()
1366
def instance_get_all_by_filters(context, filters, sort_key, sort_dir):
    """Return instances that match all filters.  Deleted instances
    will be returned by default, unless there's a filter that says
    otherwise.

    Exact-match filters are pushed into the SQL query; any remaining
    filters are applied in Python via regexp matching.
    """

    def _regexp_filter_by_metadata(instance, meta):
        # True only if every requested key/value pair is present in the
        # instance's metadata
        inst_metadata = [{node['key']: node['value']}
                         for node in instance['metadata']]
        if isinstance(meta, list):
            for node in meta:
                if node not in inst_metadata:
                    return False
        elif isinstance(meta, dict):
            for k, v in meta.iteritems():
                if {k: v} not in inst_metadata:
                    return False
        return True

    def _regexp_filter_by_column(instance, filter_name, filter_re):
        # unknown column names match everything (filter is ignored)
        try:
            v = getattr(instance, filter_name)
        except AttributeError:
            return True
        if v and filter_re.match(str(v)):
            return True
        return False

    sort_fn = {'desc': desc, 'asc': asc}

    session = get_session()
    query_prefix = session.query(models.Instance).\
            options(joinedload('info_cache')).\
            options(joinedload('security_groups')).\
            options(joinedload('metadata')).\
            options(joinedload('instance_type')).\
            order_by(sort_fn[sort_dir](getattr(models.Instance, sort_key)))

    # Make a copy of the filters dictionary to use going forward, as we'll
    # be modifying it and we shouldn't affect the caller's use of it.
    filters = filters.copy()

    if 'changes-since' in filters:
        changes_since = utils.normalize_time(filters['changes-since'])
        query_prefix = query_prefix.\
                filter(models.Instance.updated_at > changes_since)

    if 'deleted' in filters:
        # Instances can be soft or hard deleted and the query needs to
        # include or exclude both
        if filters.pop('deleted'):
            deleted = or_(models.Instance.deleted == True,
                          models.Instance.vm_state == vm_states.SOFT_DELETE)
            query_prefix = query_prefix.filter(deleted)
        else:
            query_prefix = query_prefix.\
                    filter_by(deleted=False).\
                    filter(models.Instance.vm_state != vm_states.SOFT_DELETE)

    if not context.is_admin:
        # If we're not admin context, add appropriate filter..
        if context.project_id:
            filters['project_id'] = context.project_id
        else:
            filters['user_id'] = context.user_id

    # Filters for exact matches that we can do along with the SQL query...
    # For other filters that don't match this, we will do regexp matching
    exact_match_filter_names = ['project_id', 'user_id', 'image_ref',
                                'vm_state', 'instance_type_id', 'uuid']

    # Filter the query; matched filters are popped from 'filters'
    query_prefix = exact_filter(query_prefix, models.Instance,
                                filters, exact_match_filter_names)

    instances = query_prefix.all()
    if not instances:
        return []

    # Now filter on everything else for regexp matching..
    # For filters not in the list, we'll attempt to use the filter_name
    # as a column name in Instance..
    regexp_filter_funcs = {}

    for filter_name in filters.iterkeys():
        filter_func = regexp_filter_funcs.get(filter_name, None)
        filter_re = re.compile(str(filters[filter_name]))
        if filter_func:
            filter_l = lambda instance: filter_func(instance, filter_re)
        elif filter_name == 'metadata':
            filter_l = lambda instance: _regexp_filter_by_metadata(instance,
                    filters[filter_name])
        else:
            filter_l = lambda instance: _regexp_filter_by_column(instance,
                    filter_name, filter_re)
        instances = filter(filter_l, instances)

    return instances
1468
def instance_get_active_by_window(context, begin, end=None, project_id=None):
    """Return instances that were active during window."""
    session = get_session()
    query = session.query(models.Instance)

    # active means: not terminated before the window began
    query = query.filter(or_(models.Instance.terminated_at == None,
                             models.Instance.terminated_at > begin))
    if end:
        query = query.filter(models.Instance.launched_at < end)
    if project_id:
        query = query.filter_by(project_id=project_id)

    return query.all()
1483
@require_admin_context
1484
def instance_get_active_by_window_joined(context, begin, end=None,
1486
"""Return instances and joins that were active during window."""
1487
session = get_session()
1488
query = session.query(models.Instance)
1490
query = query.options(joinedload('info_cache')).\
1491
options(joinedload('security_groups')).\
1492
options(joinedload('metadata')).\
1493
options(joinedload('instance_type')).\
1494
filter(or_(models.Instance.terminated_at == None,
1495
models.Instance.terminated_at > begin))
1497
query = query.filter(models.Instance.launched_at < end)
1499
query = query.filter_by(project_id=project_id)
1504
@require_admin_context
def _instance_get_all_query(context, project_only=False):
    """Common instance query with related rows eagerly loaded."""
    return model_query(context, models.Instance, project_only=project_only).\
            options(joinedload('info_cache')).\
            options(joinedload('security_groups')).\
            options(joinedload('metadata')).\
            options(joinedload('instance_type'))
1513
@require_admin_context
def instance_get_all_by_host(context, host):
    """Return all instances scheduled on the given host."""
    return _instance_get_all_query(context).filter_by(host=host).all()
1519
def instance_get_all_by_project(context, project_id):
1520
authorize_project_context(context, project_id)
1521
return _instance_get_all_query(context).\
1522
filter_by(project_id=project_id).\
1527
def instance_get_all_by_reservation(context, reservation_id):
1528
return _instance_get_all_query(context, project_only=True).\
1529
filter_by(reservation_id=reservation_id).\
1533
# NOTE(jkoelker) This is only being left here for compat with floating
#                ips. Currently the network_api doesn't return floaters
#                in network_info. Once it starts return the model. This
#                function and it's call in compute/manager.py on 1829 can
#                go away
def instance_get_floating_address(context, instance_id):
    """Return the first floating ip address for an instance, or None."""
    fixed_ips = fixed_ip_get_by_instance(context, instance_id)
    if not fixed_ips:
        return None
    # NOTE(tr3buchet): this only gets the first fixed_ip
    # won't find floating ips associated with other fixed_ips
    floating_ips = floating_ip_get_by_fixed_address(context,
                                                    fixed_ips[0]['address'])
    if not floating_ips:
        return None
    # NOTE(vish): this just returns the first floating ip
    return floating_ips[0]['address']
1553
@require_admin_context
1554
def instance_get_all_hung_in_rebooting(context, reboot_window, session=None):
1555
reboot_window = datetime.datetime.utcnow() - datetime.timedelta(
1556
seconds=reboot_window)
1559
session = get_session()
1561
results = session.query(models.Instance).\
1562
filter(models.Instance.updated_at <= reboot_window).\
1563
filter_by(task_state="rebooting").all()
1569
def instance_test_and_set(context, instance_id, attr, ok_states,
1570
new_state, session=None):
1571
"""Atomically check if an instance is in a valid state, and if it is, set
1572
the instance into a new state.
1575
session = get_session()
1577
with session.begin():
1578
query = model_query(context, models.Instance, session=session,
1581
if utils.is_uuid_like(instance_id):
1582
query = query.filter_by(uuid=instance_id)
1584
query = query.filter_by(id=instance_id)
1586
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
1587
# then this has concurrency issues
1588
instance = query.with_lockmode('update').first()
1590
state = instance[attr]
1591
if state not in ok_states:
1592
raise exception.InstanceInvalidState(
1594
instance_uuid=instance['uuid'],
1596
method='instance_test_and_set')
1598
instance[attr] = new_state
1599
instance.save(session=session)
1603
def instance_update(context, instance_id, values):
    """Update an instance from the values dictionary.

    :param instance_id: instance id or uuid
    :param values: column values; a 'metadata' key is handled separately
                   via instance_metadata_update
    :returns: the updated instance model object
    """
    session = get_session()

    if utils.is_uuid_like(instance_id):
        instance_ref = instance_get_by_uuid(context, instance_id,
                                            session=session)
    else:
        instance_ref = instance_get(context, instance_id, session=session)

    metadata = values.get('metadata')
    if metadata is not None:
        instance_metadata_update(context,
                                 instance_ref['id'],
                                 values.pop('metadata'),
                                 delete=True)
    with session.begin():
        instance_ref.update(values)
        instance_ref.save(session=session)

    return instance_ref
1625
def instance_add_security_group(context, instance_uuid, security_group_id):
1626
"""Associate the given security group with the given instance"""
1627
session = get_session()
1628
with session.begin():
1629
instance_ref = instance_get_by_uuid(context, instance_uuid,
1631
security_group_ref = security_group_get(context,
1634
instance_ref.security_groups += [security_group_ref]
1635
instance_ref.save(session=session)
1639
def instance_remove_security_group(context, instance_uuid, security_group_id):
1640
"""Disassociate the given security group from the given instance"""
1641
session = get_session()
1642
instance_ref = instance_get_by_uuid(context, instance_uuid,
1644
session.query(models.SecurityGroupInstanceAssociation).\
1645
filter_by(instance_id=instance_ref['id']).\
1646
filter_by(security_group_id=security_group_id).\
1647
update({'deleted': True,
1648
'deleted_at': utils.utcnow(),
1649
'updated_at': literal_column('updated_at')})
1653
def instance_action_create(context, values):
1654
"""Create an instance action from the values dictionary."""
1655
action_ref = models.InstanceActions()
1656
action_ref.update(values)
1658
session = get_session()
1659
with session.begin():
1660
action_ref.save(session=session)
1664
@require_admin_context
1665
def instance_get_actions(context, instance_uuid):
1666
"""Return the actions associated to the given instance id"""
1667
session = get_session()
1668
return session.query(models.InstanceActions).\
1669
filter_by(instance_uuid=instance_uuid).\
1674
def instance_get_id_to_uuid_mapping(context, ids):
1675
session = get_session()
1676
instances = session.query(models.Instance).\
1677
filter(models.Instance.id.in_(ids)).\
1680
for instance in instances:
1681
mapping[instance['id']] = instance['uuid']
1689
def instance_info_cache_create(context, values):
1690
"""Create a new instance cache record in the table.
1692
:param context: = request context object
1693
:param values: = dict containing column values
1695
info_cache = models.InstanceInfoCache()
1696
info_cache.update(values)
1698
session = get_session()
1699
with session.begin():
1700
info_cache.save(session=session)
1705
def instance_info_cache_get(context, instance_uuid, session=None):
1706
"""Gets an instance info cache from the table.
1708
:param instance_uuid: = uuid of the info cache's instance
1709
:param session: = optional session object
1711
session = session or get_session()
1713
info_cache = session.query(models.InstanceInfoCache).\
1714
filter_by(instance_id=instance_uuid).\
1720
def instance_info_cache_update(context, instance_uuid, values,
                               session=None):
    """Update an instance info cache record in the table.

    :param instance_uuid: = uuid of info cache's instance
    :param values: = dict containing column values to update
    :param session: = optional session object
    :returns: the updated (or newly created) info cache object
    """
    session = session or get_session()
    info_cache = instance_info_cache_get(context, instance_uuid,
                                         session=session)

    if info_cache:
        info_cache.update(values)
        info_cache.save(session=session)
    else:
        # NOTE(tr3buchet): just in case someone blows away an instance's
        #                  cache entry, re-create it
        values['instance_id'] = instance_uuid
        info_cache = instance_info_cache_create(context, values)

    return info_cache
1745
def instance_info_cache_delete(context, instance_uuid, session=None):
1746
"""Deletes an existing instance_info_cache record
1748
:param instance_uuid: = uuid of the instance tied to the cache record
1749
:param session: = optional session object
1751
values = {'deleted': True,
1752
'deleted_at': utils.utcnow()}
1753
instance_info_cache_update(context, instance_uuid, values, session)
1760
def key_pair_create(context, values):
1761
key_pair_ref = models.KeyPair()
1762
key_pair_ref.update(values)
1768
def key_pair_destroy(context, user_id, name):
1769
authorize_user_context(context, user_id)
1770
session = get_session()
1771
with session.begin():
1772
key_pair_ref = key_pair_get(context, user_id, name, session=session)
1773
key_pair_ref.delete(session=session)
1777
def key_pair_destroy_all_by_user(context, user_id):
    """Soft-delete all key pairs belonging to the given user."""
    authorize_user_context(context, user_id)
    session = get_session()
    with session.begin():
        # pin 'updated_at' so the soft delete isn't seen as an update
        session.query(models.KeyPair).\
                filter_by(user_id=user_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
1789
def key_pair_get(context, user_id, name, session=None):
    """Get a key pair by owner and name; raises KeypairNotFound."""
    authorize_user_context(context, user_id)
    result = model_query(context, models.KeyPair, session=session).\
                     filter_by(user_id=user_id).\
                     filter_by(name=name).\
                     first()

    if not result:
        raise exception.KeypairNotFound(user_id=user_id, name=name)

    return result
1803
def key_pair_get_all_by_user(context, user_id):
1804
authorize_user_context(context, user_id)
1805
return model_query(context, models.KeyPair, read_deleted="no").\
1806
filter_by(user_id=user_id).\
1813
@require_admin_context
1814
def network_associate(context, project_id, force=False):
1815
"""Associate a project with a network.
1817
called by project_get_networks under certain conditions
1818
and network manager add_network_to_project()
1820
only associate if the project doesn't already have a network
1823
force solves race condition where a fresh project has multiple instance
1824
builds simultaneously picked up by multiple network hosts which attempt
1825
to associate the project with multiple networks
1826
force should only be used as a direct consequence of user request
1827
all automated requests should not use force
1829
session = get_session()
1830
with session.begin():
1832
def network_query(project_filter):
1833
return model_query(context, models.Network, session=session,
1834
read_deleted="no").\
1835
filter_by(project_id=project_filter).\
1836
with_lockmode('update').\
1840
# find out if project has a network
1841
network_ref = network_query(project_id)
1843
if force or not network_ref:
1844
# in force mode or project doesn't have a network so associate
1845
# with a new network
1848
network_ref = network_query(None)
1850
raise db.NoMoreNetworks()
1852
# associate with network
1853
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
1854
# then this has concurrency issues
1855
network_ref['project_id'] = project_id
1856
session.add(network_ref)
1860
@require_admin_context
1861
def network_count(context):
1862
return model_query(context, models.Network).count()
1865
@require_admin_context
1866
def _network_ips_query(context, network_id):
1867
return model_query(context, models.FixedIp, read_deleted="no").\
1868
filter_by(network_id=network_id)
1871
@require_admin_context
1872
def network_count_reserved_ips(context, network_id):
1873
return _network_ips_query(context, network_id).\
1874
filter_by(reserved=True).\
1878
@require_admin_context
def network_create_safe(context, values):
    """Create a network, returning None on a duplicate-key collision.

    :raises: DuplicateVlan if the requested vlan is already in use
    """
    if values.get('vlan'):
        if model_query(context, models.Network, read_deleted="no")\
                      .filter_by(vlan=values['vlan'])\
                      .first():
            raise exception.DuplicateVlan(vlan=values['vlan'])

    network_ref = models.Network()
    network_ref['uuid'] = str(utils.gen_uuid())
    network_ref.update(values)

    try:
        network_ref.save()
        return network_ref
    except IntegrityError:
        # another racer already inserted an equivalent row
        return None
1897
@require_admin_context
1898
def network_delete_safe(context, network_id):
1899
session = get_session()
1900
with session.begin():
1901
network_ref = network_get(context, network_id=network_id,
1903
session.delete(network_ref)
1906
@require_admin_context
1907
def network_disassociate(context, network_id):
1908
network_update(context, network_id, {'project_id': None,
1913
def network_get(context, network_id, session=None):
1914
result = model_query(context, models.Network, session=session,
1915
project_only=True).\
1916
filter_by(id=network_id).\
1920
raise exception.NetworkNotFound(network_id=network_id)
1925
@require_admin_context
1926
def network_get_all(context):
1927
result = model_query(context, models.Network, read_deleted="no").all()
1930
raise exception.NoNetworksFound()
1935
@require_admin_context
def network_get_all_by_uuids(context, network_uuids, project_id=None):
    """Return all networks matching the uuids, scoped to project or global.

    :raises: NoNetworksFound if nothing matched
    :raises: NetworkHostNotSet if a matched network has no host assigned
    :raises: NetworkNotFoundForProject / NetworkNotFound if any requested
             uuid is missing from the result
    """
    project_or_none = or_(models.Network.project_id == project_id,
                          models.Network.project_id == None)
    result = model_query(context, models.Network, read_deleted="no").\
                filter(models.Network.uuid.in_(network_uuids)).\
                filter(project_or_none).\
                all()

    if not result:
        raise exception.NoNetworksFound()

    #check if host is set to all of the networks
    # returned in the result
    for network in result:
        if network['host'] is None:
            raise exception.NetworkHostNotSet(network_id=network['id'])

    #check if the result contains all the networks
    #we are looking for
    for network_uuid in network_uuids:
        found = False
        for network in result:
            if network['uuid'] == network_uuid:
                found = True
                break
        if not found:
            if project_id:
                raise exception.NetworkNotFoundForProject(
                    network_uuid=network_uuid, project_id=context.project_id)
            raise exception.NetworkNotFound(network_id=network_uuid)

    return result
1969
# NOTE(vish): pylint complains because of the long method name, but
1970
# it fits with the names of the rest of the methods
1971
# pylint: disable=C0103
1974
@require_admin_context
1975
def network_get_associated_fixed_ips(context, network_id, host=None):
1976
# FIXME(sirp): since this returns fixed_ips, this would be better named
1977
# fixed_ip_get_all_by_network.
1978
# NOTE(vish): The ugly joins here are to solve a performance issue and
1979
# should be removed once we can add and remove leases
1980
# without regenerating the whole list
1981
vif_and = and_(models.VirtualInterface.id ==
1982
models.FixedIp.virtual_interface_id,
1983
models.VirtualInterface.deleted == False)
1984
inst_and = and_(models.Instance.id == models.FixedIp.instance_id,
1985
models.Instance.deleted == False)
1986
session = get_session()
1987
query = session.query(models.FixedIp.address,
1988
models.FixedIp.instance_id,
1989
models.FixedIp.network_id,
1990
models.FixedIp.virtual_interface_id,
1991
models.VirtualInterface.address,
1992
models.Instance.hostname,
1993
models.Instance.updated_at,
1994
models.Instance.created_at).\
1995
filter(models.FixedIp.deleted == False).\
1996
filter(models.FixedIp.network_id == network_id).\
1997
filter(models.FixedIp.allocated == True).\
1998
join((models.VirtualInterface, vif_and)).\
1999
join((models.Instance, inst_and)).\
2000
filter(models.FixedIp.instance_id != None).\
2001
filter(models.FixedIp.virtual_interface_id != None)
2003
query = query.filter(models.Instance.host == host)
2004
result = query.all()
2006
for datum in result:
2008
cleaned['address'] = datum[0]
2009
cleaned['instance_id'] = datum[1]
2010
cleaned['network_id'] = datum[2]
2011
cleaned['vif_id'] = datum[3]
2012
cleaned['vif_address'] = datum[4]
2013
cleaned['instance_hostname'] = datum[5]
2014
cleaned['instance_updated'] = datum[6]
2015
cleaned['instance_created'] = datum[7]
2016
data.append(cleaned)
2020
@require_admin_context
2021
def _network_get_query(context, session=None):
2022
return model_query(context, models.Network, session=session,
2026
@require_admin_context
2027
def network_get_by_bridge(context, bridge):
2028
result = _network_get_query(context).filter_by(bridge=bridge).first()
2031
raise exception.NetworkNotFoundForBridge(bridge=bridge)
2036
@require_admin_context
2037
def network_get_by_uuid(context, uuid):
2038
result = _network_get_query(context).filter_by(uuid=uuid).first()
2041
raise exception.NetworkNotFoundForUUID(uuid=uuid)
2046
@require_admin_context
2047
def network_get_by_cidr(context, cidr):
2048
result = _network_get_query(context).\
2049
filter(or_(models.Network.cidr == cidr,
2050
models.Network.cidr_v6 == cidr)).\
2054
raise exception.NetworkNotFoundForCidr(cidr=cidr)
2059
@require_admin_context
2060
def network_get_by_instance(context, instance_id):
2061
# note this uses fixed IP to get to instance
2062
# only works for networks the instance has an IP from
2063
result = _network_get_query(context).\
2064
filter_by(instance_id=instance_id).\
2068
raise exception.NetworkNotFoundForInstance(instance_id=instance_id)
2073
@require_admin_context
2074
def network_get_all_by_instance(context, instance_id):
2075
result = _network_get_query(context).\
2076
filter_by(instance_id=instance_id).\
2080
raise exception.NetworkNotFoundForInstance(instance_id=instance_id)
2085
@require_admin_context
2086
def network_get_all_by_host(context, host):
2087
session = get_session()
2088
fixed_ip_query = model_query(context, models.FixedIp.network_id,
2090
filter(models.FixedIp.host == host)
2091
# NOTE(vish): return networks that have host set
2092
# or that have a fixed ip with host set
2093
host_filter = or_(models.Network.host == host,
2094
models.Network.id.in_(fixed_ip_query.subquery()))
2095
return _network_get_query(context, session=session).\
2096
filter(host_filter).\
2100
@require_admin_context
2101
def network_set_host(context, network_id, host_id):
2102
session = get_session()
2103
with session.begin():
2104
network_ref = _network_get_query(context, session=session).\
2105
filter_by(id=network_id).\
2106
with_lockmode('update').\
2110
raise exception.NetworkNotFound(network_id=network_id)
2112
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
2113
# then this has concurrency issues
2114
if not network_ref['host']:
2115
network_ref['host'] = host_id
2116
session.add(network_ref)
2118
return network_ref['host']
2122
def network_update(context, network_id, values):
    """Update a network from the values dictionary.

    :returns: the updated network model object
    """
    session = get_session()
    with session.begin():
        network_ref = network_get(context, network_id, session=session)
        network_ref.update(values)
        network_ref.save(session=session)
        return network_ref
2134
def queue_get_for(context, topic, physical_node_id):
    """Build the message-queue name "<topic>.<physical_node_id>".

    The context argument is accepted for API symmetry and unused here.
    """
    # FIXME(ja): this should be servername?
    queue_name = "{0}.{1}".format(topic, physical_node_id)
    return queue_name
2142
@require_admin_context
2143
def iscsi_target_count_by_host(context, host):
2144
return model_query(context, models.IscsiTarget).\
2145
filter_by(host=host).\
2149
@require_admin_context
2150
def iscsi_target_create_safe(context, values):
2151
iscsi_target_ref = models.IscsiTarget()
2152
for (key, value) in values.iteritems():
2153
iscsi_target_ref[key] = value
2155
iscsi_target_ref.save()
2156
return iscsi_target_ref
2157
except IntegrityError:
2164
@require_admin_context
2165
def auth_token_destroy(context, token_id):
2166
session = get_session()
2167
with session.begin():
2168
token_ref = auth_token_get(context, token_id, session=session)
2169
token_ref.delete(session=session)
2172
@require_admin_context
2173
def auth_token_get(context, token_hash, session=None):
2174
result = model_query(context, models.AuthToken, session=session).\
2175
filter_by(token_hash=token_hash).\
2179
raise exception.AuthTokenNotFound(token=token_hash)
2184
@require_admin_context
2185
def auth_token_update(context, token_hash, values):
2186
session = get_session()
2187
with session.begin():
2188
token_ref = auth_token_get(context, token_hash, session=session)
2189
token_ref.update(values)
2190
token_ref.save(session=session)
2193
@require_admin_context
2194
def auth_token_create(context, token):
2195
tk = models.AuthToken()
2205
def quota_get(context, project_id, resource, session=None):
2206
result = model_query(context, models.Quota, session=session,
2207
read_deleted="no").\
2208
filter_by(project_id=project_id).\
2209
filter_by(resource=resource).\
2213
raise exception.ProjectQuotaNotFound(project_id=project_id)
2219
def quota_get_all_by_project(context, project_id):
2220
authorize_project_context(context, project_id)
2222
rows = model_query(context, models.Quota, read_deleted="no").\
2223
filter_by(project_id=project_id).\
2226
result = {'project_id': project_id}
2228
result[row.resource] = row.hard_limit
2233
@require_admin_context
2234
def quota_create(context, project_id, resource, limit):
2235
# NOTE: Treat -1 as unlimited for consistency w/ flags
2238
quota_ref = models.Quota()
2239
quota_ref.project_id = project_id
2240
quota_ref.resource = resource
2241
quota_ref.hard_limit = limit
2246
@require_admin_context
2247
def quota_update(context, project_id, resource, limit):
2248
# NOTE: Treat -1 as unlimited for consistency w/ flags
2251
session = get_session()
2252
with session.begin():
2253
quota_ref = quota_get(context, project_id, resource, session=session)
2254
quota_ref.hard_limit = limit
2255
quota_ref.save(session=session)
2258
@require_admin_context
2259
def quota_destroy(context, project_id, resource):
2260
session = get_session()
2261
with session.begin():
2262
quota_ref = quota_get(context, project_id, resource, session=session)
2263
quota_ref.delete(session=session)
2266
@require_admin_context
2267
def quota_destroy_all_by_project(context, project_id):
2268
session = get_session()
2269
with session.begin():
2270
quotas = model_query(context, models.Quota, session=session,
2271
read_deleted="no").\
2272
filter_by(project_id=project_id).\
2275
for quota_ref in quotas:
2276
quota_ref.delete(session=session)
2282
@require_admin_context
2283
def volume_allocate_iscsi_target(context, volume_id, host):
2284
session = get_session()
2285
with session.begin():
2286
iscsi_target_ref = model_query(context, models.IscsiTarget,
2287
session=session, read_deleted="no").\
2288
filter_by(volume=None).\
2289
filter_by(host=host).\
2290
with_lockmode('update').\
2293
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
2294
# then this has concurrency issues
2295
if not iscsi_target_ref:
2296
raise db.NoMoreTargets()
2298
iscsi_target_ref.volume_id = volume_id
2299
session.add(iscsi_target_ref)
2301
return iscsi_target_ref.target_num
2304
@require_admin_context
2305
def volume_attached(context, volume_id, instance_id, mountpoint):
2306
session = get_session()
2307
with session.begin():
2308
volume_ref = volume_get(context, volume_id, session=session)
2309
volume_ref['status'] = 'in-use'
2310
volume_ref['mountpoint'] = mountpoint
2311
volume_ref['attach_status'] = 'attached'
2312
volume_ref.instance = instance_get(context, instance_id,
2314
volume_ref.save(session=session)
2318
def volume_create(context, values):
2319
values['volume_metadata'] = _metadata_refs(values.get('metadata'),
2320
models.VolumeMetadata)
2321
volume_ref = models.Volume()
2322
volume_ref.update(values)
2324
session = get_session()
2325
with session.begin():
2326
volume_ref.save(session=session)
2330
@require_admin_context
2331
def volume_data_get_for_project(context, project_id):
2332
result = model_query(context,
2333
func.count(models.Volume.id),
2334
func.sum(models.Volume.size),
2335
read_deleted="no").\
2336
filter_by(project_id=project_id).\
2339
# NOTE(vish): convert None to 0
2340
return (result[0] or 0, result[1] or 0)
2343
@require_admin_context
def volume_destroy(context, volume_id):
    """Soft-delete a volume and its metadata; release its iscsi target."""
    session = get_session()
    with session.begin():
        # pin 'updated_at' so the soft delete isn't seen as an update
        session.query(models.Volume).\
                filter_by(id=volume_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
        # free the iscsi target for reuse rather than deleting it
        session.query(models.IscsiTarget).\
                filter_by(volume_id=volume_id).\
                update({'volume_id': None})
        session.query(models.VolumeMetadata).\
                filter_by(volume_id=volume_id).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
2362
@require_admin_context
2363
def volume_detached(context, volume_id):
2364
session = get_session()
2365
with session.begin():
2366
volume_ref = volume_get(context, volume_id, session=session)
2367
volume_ref['status'] = 'available'
2368
volume_ref['mountpoint'] = None
2369
volume_ref['attach_status'] = 'detached'
2370
volume_ref.instance = None
2371
volume_ref.save(session=session)
2375
def _volume_get_query(context, session=None, project_only=False):
    """Common volume query with instance/metadata/type eagerly loaded."""
    return model_query(context, models.Volume, session=session,
                       project_only=project_only).\
                       options(joinedload('instance')).\
                       options(joinedload('volume_metadata')).\
                       options(joinedload('volume_type'))
2384
def volume_get(context, volume_id, session=None):
    """Get a volume by id; raises VolumeNotFound if missing."""
    result = _volume_get_query(context, session=session, project_only=True).\
                    filter_by(id=volume_id).\
                    first()

    if not result:
        raise exception.VolumeNotFound(volume_id=volume_id)

    return result
2395
@require_admin_context
2396
def volume_get_all(context):
2397
return _volume_get_query(context).all()
2400
@require_admin_context
2401
def volume_get_all_by_host(context, host):
2402
return _volume_get_query(context).filter_by(host=host).all()
2405
@require_admin_context
2406
def volume_get_all_by_instance(context, instance_id):
2407
result = model_query(context, models.Volume, read_deleted="no").\
2408
options(joinedload('volume_metadata')).\
2409
options(joinedload('volume_type')).\
2410
filter_by(instance_id=instance_id).\
2414
raise exception.VolumeNotFoundForInstance(instance_id=instance_id)
2420
def volume_get_all_by_project(context, project_id):
    """Return all volumes owned by a project; caller must be authorized
    for that project."""
    authorize_project_context(context, project_id)
    return _volume_get_query(context).filter_by(project_id=project_id).all()
2425
@require_admin_context
def volume_get_instance(context, volume_id):
    """Return the instance a volume is attached to.

    Raises VolumeNotFound when the volume does not exist.
    """
    result = _volume_get_query(context).filter_by(id=volume_id).first()
    if not result:
        raise exception.VolumeNotFound(volume_id=volume_id)

    return result.instance
2435
@require_admin_context
def volume_get_iscsi_target_num(context, volume_id):
    """Return the iSCSI target number assigned to a volume.

    Deleted targets are included so exports can be cleaned up.
    """
    result = model_query(context, models.IscsiTarget, read_deleted="yes").\
        filter_by(volume_id=volume_id).\
        first()

    if not result:
        raise exception.ISCSITargetNotFoundForVolume(volume_id=volume_id)

    return result.target_num
2448
def volume_update(context, volume_id, values):
    """Update a volume row from a dict of values.

    A 'metadata' entry in values is popped and applied through
    volume_metadata_update (with delete=True, i.e. full replacement).
    """
    session = get_session()
    metadata = values.get('metadata')
    if metadata is not None:
        volume_metadata_update(context,
                               volume_id,
                               values.pop('metadata'),
                               delete=True)
    with session.begin():
        volume_ref = volume_get(context, volume_id, session=session)
        volume_ref.update(values)
        volume_ref.save(session=session)
2462
####################
2464
def _volume_metadata_get_query(context, volume_id, session=None):
    """Base query for a volume's non-deleted metadata rows."""
    return model_query(context, models.VolumeMetadata,
                       session=session, read_deleted="no").\
        filter_by(volume_id=volume_id)
2471
@require_volume_exists
def volume_metadata_get(context, volume_id):
    """Return a volume's metadata as a plain {key: value} dict."""
    rows = _volume_metadata_get_query(context, volume_id).all()

    result = {}
    for row in rows:
        result[row['key']] = row['value']

    return result
2482
@require_volume_exists
def volume_metadata_delete(context, volume_id, key):
    """Soft-delete a single metadata item; updated_at is left untouched
    via the literal_column trick."""
    _volume_metadata_get_query(context, volume_id).\
        filter_by(key=key).\
        update({'deleted': True,
                'deleted_at': utils.utcnow(),
                'updated_at': literal_column('updated_at')})
2492
@require_volume_exists
def volume_metadata_get_item(context, volume_id, key, session=None):
    """Return one metadata row for a volume by key.

    Raises VolumeMetadataNotFound if the key does not exist.
    """
    result = _volume_metadata_get_query(context, volume_id,
                                        session=session).\
        filter_by(key=key).\
        first()

    if not result:
        raise exception.VolumeMetadataNotFound(metadata_key=key,
                                               volume_id=volume_id)

    return result
2505
@require_volume_exists
2506
def volume_metadata_update(context, volume_id, metadata, delete):
2507
session = get_session()
2509
# Set existing metadata to deleted if delete argument is True
2511
original_metadata = volume_metadata_get(context, volume_id)
2512
for meta_key, meta_value in original_metadata.iteritems():
2513
if meta_key not in metadata:
2514
meta_ref = volume_metadata_get_item(context, volume_id,
2516
meta_ref.update({'deleted': True})
2517
meta_ref.save(session=session)
2521
# Now update all existing items with new values, or create new meta objects
2522
for meta_key, meta_value in metadata.iteritems():
2524
# update the value whether it exists or not
2525
item = {"value": meta_value}
2528
meta_ref = volume_metadata_get_item(context, volume_id,
2530
except exception.VolumeMetadataNotFound, e:
2531
meta_ref = models.VolumeMetadata()
2532
item.update({"key": meta_key, "volume_id": volume_id})
2534
meta_ref.update(item)
2535
meta_ref.save(session=session)
2544
def snapshot_create(context, values):
    """Create a snapshot row from a dict of values and return it."""
    snapshot_ref = models.Snapshot()
    snapshot_ref.update(values)

    session = get_session()
    with session.begin():
        snapshot_ref.save(session=session)
    return snapshot_ref
2554
@require_admin_context
def snapshot_destroy(context, snapshot_id):
    """Soft-delete a snapshot; updated_at is preserved via
    literal_column."""
    session = get_session()
    with session.begin():
        session.query(models.Snapshot).\
            filter_by(id=snapshot_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
2566
def snapshot_get(context, snapshot_id, session=None):
    """Return a snapshot by id, scoped to the caller's project.

    Raises SnapshotNotFound when missing.
    """
    result = model_query(context, models.Snapshot, session=session,
                         project_only=True).\
        filter_by(id=snapshot_id).\
        first()

    if not result:
        raise exception.SnapshotNotFound(snapshot_id=snapshot_id)

    return result
2578
@require_admin_context
def snapshot_get_all(context):
    """Return all snapshots (admin only)."""
    return model_query(context, models.Snapshot).all()
2584
def snapshot_get_all_for_volume(context, volume_id):
    """Return all non-deleted snapshots of a volume, project-scoped."""
    return model_query(context, models.Snapshot, read_deleted='no',
                       project_only=True).\
        filter_by(volume_id=volume_id).all()
2591
def snapshot_get_all_by_project(context, project_id):
    """Return all snapshots owned by a project; caller must be
    authorized for that project."""
    authorize_project_context(context, project_id)
    return model_query(context, models.Snapshot).\
        filter_by(project_id=project_id).\
        all()
2599
def snapshot_update(context, snapshot_id, values):
    """Update a snapshot row from a dict of values."""
    session = get_session()
    with session.begin():
        snapshot_ref = snapshot_get(context, snapshot_id, session=session)
        snapshot_ref.update(values)
        snapshot_ref.save(session=session)
2610
def _block_device_mapping_get_query(context, session=None):
    """Base query for block device mappings."""
    # NOTE(review): the corrupted source shows a dropped continuation line
    # after `session=session,`; read_deleted="no" matches model_query's
    # default behavior — confirm against upstream history.
    return model_query(context, models.BlockDeviceMapping, session=session,
                       read_deleted="no")
2616
def block_device_mapping_create(context, values):
    """Create a block device mapping row from a dict of values."""
    bdm_ref = models.BlockDeviceMapping()
    bdm_ref.update(values)

    session = get_session()
    with session.begin():
        bdm_ref.save(session=session)
2626
def block_device_mapping_update(context, bdm_id, values):
    """Update a block device mapping row by id."""
    session = get_session()
    with session.begin():
        _block_device_mapping_get_query(context, session=session).\
            filter_by(id=bdm_id).\
            update(values)
2635
def block_device_mapping_update_or_create(context, values):
    """Update the mapping for (instance_id, device_name) or create it,
    then soft-delete other mappings that reuse the same swap/ephemeral
    virtual device name."""
    session = get_session()
    with session.begin():
        result = _block_device_mapping_get_query(context, session=session).\
            filter_by(instance_id=values['instance_id']).\
            filter_by(device_name=values['device_name']).\
            first()
        if not result:
            bdm_ref = models.BlockDeviceMapping()
            bdm_ref.update(values)
            bdm_ref.save(session=session)
        else:
            result.update(values)

        # NOTE(yamahata): same virtual device name can be specified multiple
        #                 times. So delete the existing ones.
        virtual_name = values['virtual_name']
        if (virtual_name is not None and
            block_device.is_swap_or_ephemeral(virtual_name)):
            session.query(models.BlockDeviceMapping).\
                filter_by(instance_id=values['instance_id']).\
                filter_by(virtual_name=virtual_name).\
                filter(models.BlockDeviceMapping.device_name !=
                       values['device_name']).\
                update({'deleted': True,
                        'deleted_at': utils.utcnow(),
                        'updated_at': literal_column('updated_at')})
2665
def block_device_mapping_get_all_by_instance(context, instance_id):
    """Return all block device mappings for an instance."""
    return _block_device_mapping_get_query(context).\
        filter_by(instance_id=instance_id).\
        all()
2672
def block_device_mapping_destroy(context, bdm_id):
    """Soft-delete a block device mapping by id."""
    session = get_session()
    with session.begin():
        session.query(models.BlockDeviceMapping).\
            filter_by(id=bdm_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
2683
def block_device_mapping_destroy_by_instance_and_volume(context, instance_id,
                                                        volume_id):
    """Soft-delete the mapping tying a volume to an instance."""
    session = get_session()
    with session.begin():
        _block_device_mapping_get_query(context, session=session).\
            filter_by(instance_id=instance_id).\
            filter_by(volume_id=volume_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
2697
def _security_group_get_query(context, session=None, read_deleted=None,
                              project_only=False):
    """Base query for security groups, eager-loading their rules."""
    return model_query(context, models.SecurityGroup, session=session,
                       read_deleted=read_deleted, project_only=project_only).\
        options(joinedload_all('rules'))
2705
def security_group_get_all(context):
    """Return all security groups."""
    return _security_group_get_query(context).all()
2710
def security_group_get(context, security_group_id, session=None):
    """Return a security group by id with its instances eager-loaded.

    Raises SecurityGroupNotFound when missing.
    """
    result = _security_group_get_query(context, session=session,
                                       project_only=True).\
        filter_by(id=security_group_id).\
        options(joinedload_all('instances')).\
        first()

    if not result:
        raise exception.SecurityGroupNotFound(
                security_group_id=security_group_id)

    return result
2725
def security_group_get_by_name(context, project_id, group_name):
    """Return a project's security group by name.

    Raises SecurityGroupNotFoundForProject when missing.
    """
    result = _security_group_get_query(context, read_deleted="no").\
        filter_by(project_id=project_id).\
        filter_by(name=group_name).\
        options(joinedload_all('instances')).\
        first()

    if not result:
        raise exception.SecurityGroupNotFoundForProject(
                project_id=project_id, security_group_id=group_name)

    return result
2740
def security_group_get_by_project(context, project_id):
    """Return all non-deleted security groups owned by a project."""
    return _security_group_get_query(context, read_deleted="no").\
        filter_by(project_id=project_id).\
        all()
2747
def security_group_get_by_instance(context, instance_id):
    """Return the security groups associated with an instance."""
    return _security_group_get_query(context, read_deleted="no").\
        join(models.SecurityGroup.instances).\
        filter_by(id=instance_id).\
        all()
2755
def security_group_exists(context, project_id, group_name):
    """Return True if the named security group exists for the project."""
    try:
        group = security_group_get_by_name(context, project_id, group_name)
        return group is not None
    except exception.NotFound:
        return False
2764
def security_group_in_use(context, group_id):
    """Return True if any live (non-deleted) instance still belongs to
    the security group."""
    session = get_session()
    with session.begin():
        # Are there any instances that haven't been deleted
        # that include this group?
        inst_assoc = session.query(models.SecurityGroupInstanceAssociation).\
            filter_by(security_group_id=group_id).\
            filter_by(deleted=False).\
            all()
        for ia in inst_assoc:
            num_instances = session.query(models.Instance).\
                filter_by(deleted=False).\
                filter_by(id=ia.instance_id).\
                count()
            if num_instances:
                return True

    return False
2785
def security_group_create(context, values):
    """Create a security group row from a dict of values and return it."""
    security_group_ref = models.SecurityGroup()
    # FIXME(devcamcar): Unless I do this, rules fails with lazy load exception
    # once save() is called.  This will get cleaned up in next orm pass.
    security_group_ref.rules
    security_group_ref.update(values)
    security_group_ref.save()
    return security_group_ref
2796
def security_group_destroy(context, security_group_id):
    """Soft-delete a security group together with its instance
    associations and ingress rules."""
    session = get_session()
    with session.begin():
        session.query(models.SecurityGroup).\
            filter_by(id=security_group_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
        session.query(models.SecurityGroupInstanceAssociation).\
            filter_by(security_group_id=security_group_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
        session.query(models.SecurityGroupIngressRule).\
            filter_by(group_id=security_group_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
2819
def _security_group_rule_get_query(context, session=None):
    """Base query for security group ingress rules."""
    return model_query(context, models.SecurityGroupIngressRule,
                       session=session)
2825
def security_group_rule_get(context, security_group_rule_id, session=None):
    """Return an ingress rule by id.

    Raises SecurityGroupNotFoundForRule when missing.
    """
    result = _security_group_rule_get_query(context, session=session).\
        filter_by(id=security_group_rule_id).\
        first()

    if not result:
        raise exception.SecurityGroupNotFoundForRule(
                rule_id=security_group_rule_id)

    return result
2838
def security_group_rule_get_by_security_group(context, security_group_id,
                                              session=None):
    """Return all rules whose parent is the given security group, with
    grantee-group instances eager-loaded."""
    return _security_group_rule_get_query(context, session=session).\
        filter_by(parent_group_id=security_group_id).\
        options(joinedload_all('grantee_group.instances')).\
        all()
2847
def security_group_rule_get_by_security_group_grantee(context,
                                                      security_group_id,
                                                      session=None):
    """Return all rules that grant access TO the given security group."""
    return _security_group_rule_get_query(context, session=session).\
        filter_by(group_id=security_group_id).\
        all()
2857
def security_group_rule_create(context, values):
    """Create an ingress rule row from a dict of values and return it."""
    security_group_rule_ref = models.SecurityGroupIngressRule()
    security_group_rule_ref.update(values)
    security_group_rule_ref.save()
    return security_group_rule_ref
2865
def security_group_rule_destroy(context, security_group_rule_id):
    """Delete an ingress rule by id."""
    session = get_session()
    with session.begin():
        security_group_rule = security_group_rule_get(context,
                                                      security_group_rule_id,
                                                      session=session)
        security_group_rule.delete(session=session)
2877
@require_admin_context
def provider_fw_rule_create(context, rule):
    """Create a provider-level firewall rule and return it."""
    fw_rule_ref = models.ProviderFirewallRule()
    fw_rule_ref.update(rule)
    fw_rule_ref.save()
    return fw_rule_ref
2885
@require_admin_context
def provider_fw_rule_get_all(context):
    """Return all provider-level firewall rules (admin only)."""
    return model_query(context, models.ProviderFirewallRule).all()
2890
@require_admin_context
def provider_fw_rule_destroy(context, rule_id):
    """Soft-delete a provider firewall rule by id."""
    session = get_session()
    with session.begin():
        session.query(models.ProviderFirewallRule).\
            filter_by(id=rule_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
2904
@require_admin_context
def user_get(context, id, session=None):
    """Return a user by id.  Raises UserNotFound when missing."""
    result = model_query(context, models.User, session=session).\
        filter_by(id=id).\
        first()

    if not result:
        raise exception.UserNotFound(user_id=id)

    return result
2916
@require_admin_context
def user_get_by_access_key(context, access_key, session=None):
    """Return a user by access key.

    Raises AccessKeyNotFound when missing.
    """
    result = model_query(context, models.User, session=session).\
        filter_by(access_key=access_key).\
        first()

    if not result:
        raise exception.AccessKeyNotFound(access_key=access_key)

    return result
2928
@require_admin_context
def user_create(context, values):
    """Create a user row from a dict of values and return it."""
    user_ref = models.User()
    user_ref.update(values)
    user_ref.save()
    return user_ref
2936
@require_admin_context
def user_delete(context, id):
    """Hard-delete a user and all of their project/role associations."""
    session = get_session()
    with session.begin():
        session.query(models.UserProjectAssociation).\
            filter_by(user_id=id).\
            delete()
        session.query(models.UserRoleAssociation).\
            filter_by(user_id=id).\
            delete()
        session.query(models.UserProjectRoleAssociation).\
            filter_by(user_id=id).\
            delete()
        user_ref = user_get(context, id, session=session)
        session.delete(user_ref)
2953
def user_get_all(context):
    """Return all users."""
    return model_query(context, models.User).all()
2957
def user_get_roles(context, user_id):
    """Return the list of global role names held by a user."""
    session = get_session()
    with session.begin():
        user_ref = user_get(context, user_id, session=session)
        return [role.role for role in user_ref['roles']]
2964
def user_get_roles_for_project(context, user_id, project_id):
    """Return the role names a user holds within a project."""
    session = get_session()
    with session.begin():
        res = session.query(models.UserProjectRoleAssociation).\
            filter_by(user_id=user_id).\
            filter_by(project_id=project_id).\
            all()
        return [association.role for association in res]
2974
def user_remove_project_role(context, user_id, project_id, role):
    """Remove a project-scoped role from a user."""
    session = get_session()
    with session.begin():
        session.query(models.UserProjectRoleAssociation).\
            filter_by(user_id=user_id).\
            filter_by(project_id=project_id).\
            filter_by(role=role).\
            delete()
2984
def user_remove_role(context, user_id, role):
    """Remove a global role from a user."""
    session = get_session()
    with session.begin():
        res = session.query(models.UserRoleAssociation).\
            filter_by(user_id=user_id).\
            filter_by(role=role).\
            all()
        for role in res:
            session.delete(role)
2995
def user_add_role(context, user_id, role):
    """Grant a global role to a user."""
    session = get_session()
    with session.begin():
        user_ref = user_get(context, user_id, session=session)
        models.UserRoleAssociation(user=user_ref, role=role).\
            save(session=session)
3003
def user_add_project_role(context, user_id, project_id, role):
    """Grant a project-scoped role to a user."""
    session = get_session()
    with session.begin():
        user_ref = user_get(context, user_id, session=session)
        project_ref = project_get(context, project_id, session=session)
        models.UserProjectRoleAssociation(user_id=user_ref['id'],
                                          project_id=project_ref['id'],
                                          role=role).save(session=session)
3013
def user_update(context, user_id, values):
    """Update a user row from a dict of values."""
    session = get_session()
    with session.begin():
        user_ref = user_get(context, user_id, session=session)
        user_ref.update(values)
        user_ref.save(session=session)
3024
def project_create(context, values):
    """Create a project row from a dict of values and return it."""
    project_ref = models.Project()
    project_ref.update(values)
    project_ref.save()
    return project_ref
3031
def project_add_member(context, project_id, user_id):
    """Add a user to a project's member list."""
    session = get_session()
    with session.begin():
        project_ref = project_get(context, project_id, session=session)
        user_ref = user_get(context, user_id, session=session)

        project_ref.members += [user_ref]
        project_ref.save(session=session)
3041
def project_get(context, id, session=None):
    """Return a project by id with members eager-loaded.

    Raises ProjectNotFound when missing.
    """
    result = model_query(context, models.Project, session=session,
                         read_deleted="no").\
        filter_by(id=id).\
        options(joinedload_all('members')).\
        first()

    if not result:
        raise exception.ProjectNotFound(project_id=id)

    return result
3054
def project_get_all(context):
    """Return all projects with members eager-loaded."""
    return model_query(context, models.Project).\
        options(joinedload_all('members')).\
        all()
3060
def project_get_by_user(context, user_id):
    """Return the projects a user belongs to.

    Raises UserNotFound when the user does not exist.
    """
    user = model_query(context, models.User).\
        filter_by(id=user_id).\
        options(joinedload_all('projects')).\
        first()

    if not user:
        raise exception.UserNotFound(user_id=user_id)

    return user.projects
3072
def project_remove_member(context, project_id, user_id):
    """Remove a user from a project's member list (no-op if absent)."""
    session = get_session()
    project = project_get(context, project_id, session=session)
    user = user_get(context, user_id, session=session)

    if user in project.members:
        project.members.remove(user)
        project.save(session=session)
3082
def project_update(context, project_id, values):
    """Update a project row from a dict of values."""
    session = get_session()
    with session.begin():
        project_ref = project_get(context, project_id, session=session)
        project_ref.update(values)
        project_ref.save(session=session)
3090
def project_delete(context, id):
    """Hard-delete a project and its user/role associations."""
    session = get_session()
    with session.begin():
        session.query(models.UserProjectAssociation).\
            filter_by(project_id=id).\
            delete()
        session.query(models.UserProjectRoleAssociation).\
            filter_by(project_id=id).\
            delete()
        project_ref = project_get(context, id, session=session)
        session.delete(project_ref)
3104
def project_get_networks(context, project_id, associate=True):
    """Return the networks owned by a project.

    If the project has none and `associate` is True, a network is
    associated with it via network_associate; with associate False an
    empty list is returned instead.
    """
    # NOTE(tr3buchet): as before this function will associate
    #                  a project with a network if it doesn't have one and
    #                  associate is true
    result = model_query(context, models.Network, read_deleted="no").\
        filter_by(project_id=project_id).\
        all()

    if not result:
        if not associate:
            return []

        return [network_associate(context, project_id)]

    return result
3124
@require_admin_context
def migration_create(context, values):
    """Create a migration record from a dict of values and return it."""
    migration = models.Migration()
    migration.update(values)
    migration.save()
    return migration
3132
@require_admin_context
def migration_update(context, id, values):
    """Update a migration record by id."""
    session = get_session()
    with session.begin():
        migration = migration_get(context, id, session=session)
        migration.update(values)
        migration.save(session=session)
3142
@require_admin_context
def migration_get(context, id, session=None):
    """Return a migration record by id (deleted rows included).

    Raises MigrationNotFound when missing.
    """
    result = model_query(context, models.Migration, session=session,
                         read_deleted="yes").\
        filter_by(id=id).\
        first()

    if not result:
        raise exception.MigrationNotFound(migration_id=id)

    return result
3155
@require_admin_context
def migration_get_by_instance_and_status(context, instance_uuid, status):
    """Return the migration for an instance with the given status.

    Raises MigrationNotFoundByStatus when missing.
    """
    result = model_query(context, models.Migration, read_deleted="yes").\
        filter_by(instance_uuid=instance_uuid).\
        filter_by(status=status).\
        first()

    if not result:
        raise exception.MigrationNotFoundByStatus(instance_id=instance_uuid,
                                                  status=status)

    return result
3169
@require_admin_context
def migration_get_all_unconfirmed(context, confirm_window, session=None):
    """Return finished migrations not updated within confirm_window
    seconds (candidates for auto-confirmation)."""
    confirm_window = datetime.datetime.utcnow() - datetime.timedelta(
            seconds=confirm_window)

    return model_query(context, models.Migration, session=session,
                       read_deleted="yes").\
        filter(models.Migration.updated_at <= confirm_window).\
        filter_by(status="FINISHED").\
        all()
3184
def console_pool_create(context, values):
    """Create a console pool row from a dict of values and return it."""
    pool = models.ConsolePool()
    pool.update(values)
    pool.save()
    return pool
3191
def console_pool_get(context, pool_id):
    """Return a console pool by id.

    Raises ConsolePoolNotFound when missing.
    """
    result = model_query(context, models.ConsolePool, read_deleted="no").\
        filter_by(id=pool_id).\
        first()

    if not result:
        raise exception.ConsolePoolNotFound(pool_id=pool_id)

    return result
3202
def console_pool_get_by_host_type(context, compute_host, host,
                                  console_type):
    """Return the console pool for (compute_host, proxy host, type).

    Raises ConsolePoolNotFoundForHostType when missing.
    """
    result = model_query(context, models.ConsolePool, read_deleted="no").\
        filter_by(host=host).\
        filter_by(console_type=console_type).\
        filter_by(compute_host=compute_host).\
        options(joinedload('consoles')).\
        first()

    if not result:
        raise exception.ConsolePoolNotFoundForHostType(
                host=host, console_type=console_type,
                compute_host=compute_host)

    return result
3220
def console_pool_get_all_by_host_type(context, host, console_type):
    """Return all console pools of a given type on a host, consoles
    eager-loaded."""
    return model_query(context, models.ConsolePool, read_deleted="no").\
        filter_by(host=host).\
        filter_by(console_type=console_type).\
        options(joinedload('consoles')).\
        all()
3228
def console_create(context, values):
    """Create a console row from a dict of values and return it."""
    console = models.Console()
    console.update(values)
    console.save()
    return console
3235
def console_delete(context, console_id):
    """Hard-delete a console row by id."""
    session = get_session()
    with session.begin():
        # NOTE(mdragon): consoles are meant to be transient.
        session.query(models.Console).\
            filter_by(id=console_id).\
            delete()
3244
def console_get_by_pool_instance(context, pool_id, instance_id):
    """Return the console for an instance within a given pool.

    Raises ConsoleNotFoundInPoolForInstance when missing.
    """
    result = model_query(context, models.Console, read_deleted="yes").\
        filter_by(pool_id=pool_id).\
        filter_by(instance_id=instance_id).\
        options(joinedload('pool')).\
        first()

    if not result:
        raise exception.ConsoleNotFoundInPoolForInstance(
                pool_id=pool_id, instance_id=instance_id)

    return result
3258
def console_get_all_by_instance(context, instance_id):
    """Return all consoles (including deleted) for an instance."""
    return model_query(context, models.Console, read_deleted="yes").\
        filter_by(instance_id=instance_id).\
        all()
3264
def console_get(context, console_id, instance_id=None):
    """Return a console by id, optionally constrained to an instance.

    Raises ConsoleNotFoundForInstance or ConsoleNotFound when missing,
    depending on whether instance_id was supplied.
    """
    query = model_query(context, models.Console, read_deleted="yes").\
        filter_by(id=console_id).\
        options(joinedload('pool'))

    if instance_id is not None:
        query = query.filter_by(instance_id=instance_id)

    result = query.first()

    if not result:
        if instance_id:
            raise exception.ConsoleNotFoundForInstance(
                    console_id=console_id, instance_id=instance_id)
        else:
            raise exception.ConsoleNotFound(console_id=console_id)

    return result
3287
@require_admin_context
3288
def instance_type_create(context, values):
3289
"""Create a new instance type. In order to pass in extra specs,
3290
the values dict should contain a 'extra_specs' key/value pair:
3292
{'extra_specs' : {'k1': 'v1', 'k2': 'v2', ...}}
3295
session = get_session()
3296
with session.begin():
3298
instance_type_get_by_name(context, values['name'], session)
3299
raise exception.InstanceTypeExists(name=values['name'])
3300
except exception.InstanceTypeNotFoundByName:
3303
instance_type_get_by_flavor_id(context, values['flavorid'],
3305
raise exception.InstanceTypeExists(name=values['name'])
3306
except exception.FlavorNotFound:
3309
specs = values.get('extra_specs')
3312
for k, v in specs.iteritems():
3313
specs_ref = models.InstanceTypeExtraSpecs()
3314
specs_ref['key'] = k
3315
specs_ref['value'] = v
3316
specs_refs.append(specs_ref)
3317
values['extra_specs'] = specs_refs
3318
instance_type_ref = models.InstanceTypes()
3319
instance_type_ref.update(values)
3320
instance_type_ref.save(session=session)
3321
except Exception, e:
3322
raise exception.DBError(e)
3323
return _dict_with_extra_specs(instance_type_ref)
3326
def _dict_with_extra_specs(inst_type_query):
3327
"""Takes an instance, volume, or instance type query returned
3328
by sqlalchemy and returns it as a dictionary, converting the
3329
extra_specs entry from a list of dicts:
3331
'extra_specs' : [{'key': 'k1', 'value': 'v1', ...}, ...]
3335
'extra_specs' : {'k1': 'v1'}
3338
inst_type_dict = dict(inst_type_query)
3339
extra_specs = dict([(x['key'], x['value'])
3340
for x in inst_type_query['extra_specs']])
3341
inst_type_dict['extra_specs'] = extra_specs
3342
return inst_type_dict
3345
def _instance_type_get_query(context, session=None, read_deleted=None):
    """Base query for instance types, eager-loading extra_specs."""
    return model_query(context, models.InstanceTypes, session=session,
                       read_deleted=read_deleted).\
        options(joinedload('extra_specs'))
3352
def instance_type_get_all(context, inactive=False, filters=None):
    """
    Returns all instance types.

    With inactive=True deleted types are included; filters may contain
    'min_memory_mb' / 'min_root_gb' lower bounds.
    """
    filters = filters or {}
    read_deleted = "yes" if inactive else "no"
    query = _instance_type_get_query(context, read_deleted=read_deleted)

    if 'min_memory_mb' in filters:
        query = query.filter(
                models.InstanceTypes.memory_mb >= filters['min_memory_mb'])
    if 'min_root_gb' in filters:
        query = query.filter(
                models.InstanceTypes.root_gb >= filters['min_root_gb'])

    inst_types = query.order_by("name").all()

    return [_dict_with_extra_specs(i) for i in inst_types]
3373
def instance_type_get(context, id, session=None):
    """Returns a dict describing specific instance_type"""
    result = _instance_type_get_query(context, session=session).\
        filter_by(id=id).\
        first()

    if not result:
        raise exception.InstanceTypeNotFound(instance_type_id=id)

    return _dict_with_extra_specs(result)
3386
def instance_type_get_by_name(context, name, session=None):
    """Returns a dict describing specific instance_type"""
    result = _instance_type_get_query(context, session=session).\
        filter_by(name=name).\
        first()

    if not result:
        raise exception.InstanceTypeNotFoundByName(instance_type_name=name)

    return _dict_with_extra_specs(result)
3399
def instance_type_get_by_flavor_id(context, flavor_id, session=None):
    """Returns a dict describing specific flavor_id"""
    result = _instance_type_get_query(context, session=session).\
        filter_by(flavorid=flavor_id).\
        first()

    if not result:
        raise exception.FlavorNotFound(flavor_id=flavor_id)

    return _dict_with_extra_specs(result)
3411
@require_admin_context
def instance_type_destroy(context, name):
    """Marks specific instance_type as deleted"""
    session = get_session()
    with session.begin():
        instance_type_ref = instance_type_get_by_name(context, name,
                                                      session=session)
        instance_type_id = instance_type_ref['id']
        session.query(models.InstanceTypes).\
            filter_by(id=instance_type_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
        session.query(models.InstanceTypeExtraSpecs).\
            filter_by(instance_type_id=instance_type_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
3431
####################
3434
@require_admin_context
def cell_create(context, values):
    """Create a cell row from a dict of values and return it."""
    cell = models.Cell()
    cell.update(values)
    cell.save()
    return cell
3442
def _cell_get_by_id_query(context, cell_id, session=None):
    """Base query selecting a single cell by id."""
    return model_query(context, models.Cell, session=session).\
        filter_by(id=cell_id)
3447
@require_admin_context
def cell_update(context, cell_id, values):
    """Update a cell row from a dict of values and return it."""
    cell = cell_get(context, cell_id)
    cell.update(values)
    cell.save()
    return cell
3455
@require_admin_context
def cell_delete(context, cell_id):
    """Delete a cell row by id."""
    session = get_session()
    with session.begin():
        _cell_get_by_id_query(context, cell_id, session=session).\
            delete()
3463
@require_admin_context
def cell_get(context, cell_id):
    """Return a cell by id.  Raises CellNotFound when missing."""
    result = _cell_get_by_id_query(context, cell_id).first()

    if not result:
        raise exception.CellNotFound(cell_id=cell_id)

    return result
3473
@require_admin_context
def cell_get_all(context):
    """Return all non-deleted cells (admin only)."""
    return model_query(context, models.Cell, read_deleted="no").all()
3478
####################
3481
def _instance_metadata_get_query(context, instance_id, session=None):
    """Base query for an instance's non-deleted metadata rows."""
    return model_query(context, models.InstanceMetadata, session=session,
                       read_deleted="no").\
        filter_by(instance_id=instance_id)
3488
@require_instance_exists
def instance_metadata_get(context, instance_id):
    """Return an instance's metadata as a plain {key: value} dict."""
    rows = _instance_metadata_get_query(context, instance_id).all()

    result = {}
    for row in rows:
        result[row['key']] = row['value']

    return result
3500
@require_instance_exists
def instance_metadata_delete(context, instance_id, key):
    """Soft-delete a single instance metadata item; updated_at is left
    untouched via literal_column."""
    _instance_metadata_get_query(context, instance_id).\
        filter_by(key=key).\
        update({'deleted': True,
                'deleted_at': utils.utcnow(),
                'updated_at': literal_column('updated_at')})
3510
@require_instance_exists
def instance_metadata_get_item(context, instance_id, key, session=None):
    """Return one metadata row for an instance by key.

    Raises InstanceMetadataNotFound if the key does not exist.
    """
    result = _instance_metadata_get_query(
        context, instance_id, session=session).\
        filter_by(key=key).\
        first()

    if not result:
        raise exception.InstanceMetadataNotFound(metadata_key=key,
                                                 instance_id=instance_id)

    return result
3525
@require_instance_exists
3526
def instance_metadata_update(context, instance_id, metadata, delete):
3527
session = get_session()
3529
# Set existing metadata to deleted if delete argument is True
3531
original_metadata = instance_metadata_get(context, instance_id)
3532
for meta_key, meta_value in original_metadata.iteritems():
3533
if meta_key not in metadata:
3534
meta_ref = instance_metadata_get_item(context, instance_id,
3536
meta_ref.update({'deleted': True})
3537
meta_ref.save(session=session)
3541
# Now update all existing items with new values, or create new meta objects
3542
for meta_key, meta_value in metadata.iteritems():
3544
# update the value whether it exists or not
3545
item = {"value": meta_value}
3548
meta_ref = instance_metadata_get_item(context, instance_id,
3550
except exception.InstanceMetadataNotFound, e:
3551
meta_ref = models.InstanceMetadata()
3552
item.update({"key": meta_key, "instance_id": instance_id})
3554
meta_ref.update(item)
3555
meta_ref.save(session=session)
3560
####################
3563
@require_admin_context
def agent_build_create(context, values):
    """Create an agent build row from a dict of values and return it."""
    agent_build_ref = models.AgentBuild()
    agent_build_ref.update(values)
    agent_build_ref.save()
    return agent_build_ref
3571
@require_admin_context
def agent_build_get_by_triple(context, hypervisor, os, architecture,
                              session=None):
    """Return the agent build matching (hypervisor, os, architecture),
    or None if there is no such build."""
    return model_query(context, models.AgentBuild, session=session,
                       read_deleted="no").\
        filter_by(hypervisor=hypervisor).\
        filter_by(os=os).\
        filter_by(architecture=architecture).\
        first()
3582
@require_admin_context
def agent_build_get_all(context):
    """Return all non-deleted agent builds (admin only)."""
    return model_query(context, models.AgentBuild, read_deleted="no").\
        all()
3588
@require_admin_context
def agent_build_destroy(context, agent_build_id):
    """Soft-delete an agent build by id."""
    session = get_session()
    with session.begin():
        model_query(context, models.AgentBuild, session=session,
                    read_deleted="yes").\
            filter_by(id=agent_build_id).\
            update({'deleted': True,
                    'deleted_at': utils.utcnow(),
                    'updated_at': literal_column('updated_at')})
3600
@require_admin_context
def agent_build_update(context, agent_build_id, values):
    """Update an agent build row by id from a dict of values."""
    session = get_session()
    with session.begin():
        agent_build_ref = model_query(context, models.AgentBuild,
                                      session=session, read_deleted="yes").\
            filter_by(id=agent_build_id).\
            first()
        agent_build_ref.update(values)
        agent_build_ref.save(session=session)
3613
####################
3616
def bw_usage_get_by_macs(context, macs, start_period):
    """Return bandwidth usage rows for the given MACs and period
    (deleted rows included)."""
    return model_query(context, models.BandwidthUsage, read_deleted="yes").\
        filter(models.BandwidthUsage.mac.in_(macs)).\
        filter_by(start_period=start_period).\
        all()
3624
def bw_usage_update(context,
                    mac,
                    start_period,
                    bw_in, bw_out,
                    session=None):
    """Create or refresh the bandwidth-usage row for (mac, start_period)
    with the latest in/out counters."""
    if not session:
        session = get_session()

    with session.begin():
        bwusage = model_query(context, models.BandwidthUsage,
                              session=session, read_deleted="yes").\
            filter_by(start_period=start_period).\
            filter_by(mac=mac).\
            first()

        if not bwusage:
            bwusage = models.BandwidthUsage()
            bwusage.start_period = start_period
            bwusage.mac = mac

        bwusage.last_refreshed = utils.utcnow()
        bwusage.bw_in = bw_in
        bwusage.bw_out = bw_out
        bwusage.save(session=session)
3650
####################
3653
def _instance_type_extra_specs_get_query(context, instance_type_id,
                                         session=None):
    """Base query for an instance type's non-deleted extra specs."""
    return model_query(context, models.InstanceTypeExtraSpecs,
                       session=session, read_deleted="no").\
        filter_by(instance_type_id=instance_type_id)
3661
def instance_type_extra_specs_get(context, instance_type_id):
    """Return an instance type's extra specs as a {key: value} dict."""
    rows = _instance_type_extra_specs_get_query(
        context, instance_type_id).\
        all()

    result = {}
    for row in rows:
        result[row['key']] = row['value']

    return result
3674
def instance_type_extra_specs_delete(context, instance_type_id, key):
    """Soft-delete one extra-spec item for an instance type."""
    _instance_type_extra_specs_get_query(
        context, instance_type_id).\
        filter_by(key=key).\
        update({'deleted': True,
                'deleted_at': utils.utcnow(),
                'updated_at': literal_column('updated_at')})
3684
def instance_type_extra_specs_get_item(context, instance_type_id, key,
                                       session=None):
    """Return one extra-spec row for an instance type by key.

    Raises InstanceTypeExtraSpecsNotFound when missing.
    """
    result = _instance_type_extra_specs_get_query(
        context, instance_type_id, session=session).\
        filter_by(key=key).\
        first()

    if not result:
        raise exception.InstanceTypeExtraSpecsNotFound(
                extra_specs_key=key, instance_type_id=instance_type_id)

    return result
3699
def instance_type_extra_specs_update_or_create(context, instance_type_id,
3701
session = get_session()
3703
for key, value in specs.iteritems():
3705
spec_ref = instance_type_extra_specs_get_item(
3706
context, instance_type_id, key, session)
3707
except exception.InstanceTypeExtraSpecsNotFound, e:
3708
spec_ref = models.InstanceTypeExtraSpecs()
3709
spec_ref.update({"key": key, "value": value,
3710
"instance_type_id": instance_type_id,
3712
spec_ref.save(session=session)
3719
@require_admin_context
3720
def volume_type_create(context, values):
3721
"""Create a new instance type. In order to pass in extra specs,
3722
the values dict should contain a 'extra_specs' key/value pair:
3724
{'extra_specs' : {'k1': 'v1', 'k2': 'v2', ...}}
3727
session = get_session()
3728
with session.begin():
3730
volume_type_get_by_name(context, values['name'], session)
3731
raise exception.VolumeTypeExists(name=values['name'])
3732
except exception.VolumeTypeNotFoundByName:
3735
specs = values.get('extra_specs')
3737
values['extra_specs'] = _metadata_refs(values.get('extra_specs'),
3738
models.VolumeTypeExtraSpecs)
3739
volume_type_ref = models.VolumeTypes()
3740
volume_type_ref.update(values)
3741
volume_type_ref.save()
3742
except Exception, e:
3743
raise exception.DBError(e)
3744
return volume_type_ref
3748
def volume_type_get_all(context, inactive=False, filters=None):
3750
Returns a dict describing all volume_types with name as key.
3752
filters = filters or {}
3754
read_deleted = "yes" if inactive else "no"
3755
rows = model_query(context, models.VolumeTypes,
3756
read_deleted=read_deleted).\
3757
options(joinedload('extra_specs')).\
3761
# TODO(sirp): this patern of converting rows to a result with extra_specs
3762
# is repeated quite a bit, might be worth creating a method for it
3765
result[row['name']] = _dict_with_extra_specs(row)
3771
def volume_type_get(context, id, session=None):
3772
"""Returns a dict describing specific volume_type"""
3773
result = model_query(context, models.VolumeTypes, session=session).\
3774
options(joinedload('extra_specs')).\
3779
raise exception.VolumeTypeNotFound(volume_type=id)
3781
return _dict_with_extra_specs(result)
3785
def volume_type_get_by_name(context, name, session=None):
3786
"""Returns a dict describing specific volume_type"""
3787
result = model_query(context, models.VolumeTypes, session=session).\
3788
options(joinedload('extra_specs')).\
3789
filter_by(name=name).\
3793
raise exception.VolumeTypeNotFoundByName(volume_type_name=name)
3795
return _dict_with_extra_specs(result)
3798
@require_admin_context
3799
def volume_type_destroy(context, name):
3800
session = get_session()
3801
with session.begin():
3802
volume_type_ref = volume_type_get_by_name(context, name,
3804
volume_type_id = volume_type_ref['id']
3805
session.query(models.VolumeTypes).\
3806
filter_by(id=volume_type_id).\
3807
update({'deleted': True,
3808
'deleted_at': utils.utcnow(),
3809
'updated_at': literal_column('updated_at')})
3810
session.query(models.VolumeTypeExtraSpecs).\
3811
filter_by(volume_type_id=volume_type_id).\
3812
update({'deleted': True,
3813
'deleted_at': utils.utcnow(),
3814
'updated_at': literal_column('updated_at')})
3817
####################
3820
def _volume_type_extra_specs_query(context, volume_type_id, session=None):
    """Build a query for the (non-deleted) extra specs of a volume type."""
    query = model_query(context, models.VolumeTypeExtraSpecs,
                        session=session, read_deleted="no")
    return query.filter_by(volume_type_id=volume_type_id)
3827
def volume_type_extra_specs_get(context, volume_type_id):
3828
rows = _volume_type_extra_specs_query(context, volume_type_id).\
3833
result[row['key']] = row['value']
3839
def volume_type_extra_specs_delete(context, volume_type_id, key):
3840
_volume_type_extra_specs_query(context, volume_type_id).\
3841
filter_by(key=key).\
3842
update({'deleted': True,
3843
'deleted_at': utils.utcnow(),
3844
'updated_at': literal_column('updated_at')})
3848
def volume_type_extra_specs_get_item(context, volume_type_id, key,
3850
result = _volume_type_extra_specs_query(
3851
context, volume_type_id, session=session).\
3852
filter_by(key=key).\
3856
raise exception.VolumeTypeExtraSpecsNotFound(
3857
extra_specs_key=key, volume_type_id=volume_type_id)
3863
def volume_type_extra_specs_update_or_create(context, volume_type_id,
3865
session = get_session()
3867
for key, value in specs.iteritems():
3869
spec_ref = volume_type_extra_specs_get_item(
3870
context, volume_type_id, key, session)
3871
except exception.VolumeTypeExtraSpecsNotFound, e:
3872
spec_ref = models.VolumeTypeExtraSpecs()
3873
spec_ref.update({"key": key, "value": value,
3874
"volume_type_id": volume_type_id,
3876
spec_ref.save(session=session)
3880
####################
3883
def s3_image_get(context, image_id):
3884
"""Find local s3 image represented by the provided id"""
3885
result = model_query(context, models.S3Image, read_deleted="yes").\
3886
filter_by(id=image_id).\
3890
raise exception.ImageNotFound(image_id=image_id)
3895
def s3_image_get_by_uuid(context, image_uuid):
3896
"""Find local s3 image represented by the provided uuid"""
3897
result = model_query(context, models.S3Image, read_deleted="yes").\
3898
filter_by(uuid=image_uuid).\
3902
raise exception.ImageNotFound(image_id=image_uuid)
3907
def s3_image_create(context, image_uuid):
3908
"""Create local s3 image represented by provided uuid"""
3910
s3_image_ref = models.S3Image()
3911
s3_image_ref.update({'uuid': image_uuid})
3913
except Exception, e:
3914
raise exception.DBError(e)
3919
####################
3922
@require_admin_context
3923
def sm_backend_conf_create(context, values):
3924
backend_conf = models.SMBackendConf()
3925
backend_conf.update(values)
3930
@require_admin_context
3931
def sm_backend_conf_update(context, sm_backend_id, values):
3932
session = get_session()
3933
with session.begin():
3934
backend_conf = model_query(context, models.SMBackendConf,
3936
read_deleted="yes").\
3937
filter_by(id=sm_backend_id).\
3940
if not backend_conf:
3941
raise exception.NotFound(
3942
_("No backend config with id %(sm_backend_id)s") % locals())
3944
backend_conf.update(values)
3945
backend_conf.save(session=session)
3949
@require_admin_context
3950
def sm_backend_conf_delete(context, sm_backend_id):
3951
# FIXME(sirp): for consistency, shouldn't this just mark as deleted with
3952
# `purge` actually deleting the record?
3953
session = get_session()
3954
with session.begin():
3955
model_query(context, models.SMBackendConf, session=session,
3956
read_deleted="yes").\
3957
filter_by(id=sm_backend_id).\
3961
@require_admin_context
3962
def sm_backend_conf_get(context, sm_backend_id):
3963
result = model_query(context, models.SMBackendConf, read_deleted="yes").\
3964
filter_by(id=sm_backend_id).\
3968
raise exception.NotFound(_("No backend config with id "
3969
"%(sm_backend_id)s") % locals())
3974
@require_admin_context
3975
def sm_backend_conf_get_by_sr(context, sr_uuid):
3976
session = get_session()
3977
return model_query(context, models.SMBackendConf, read_deleted="yes").\
3978
filter_by(sr_uuid=sr_uuid).\
3982
@require_admin_context
3983
def sm_backend_conf_get_all(context):
3984
return model_query(context, models.SMBackendConf, read_deleted="yes").\
3988
####################
3991
def _sm_flavor_get_query(context, sm_flavor_label, session=None):
    """Build a query for SM flavors with the given label (deleted included)."""
    query = model_query(context, models.SMFlavors,
                        session=session, read_deleted="yes")
    return query.filter_by(label=sm_flavor_label)
3997
@require_admin_context
3998
def sm_flavor_create(context, values):
3999
sm_flavor = models.SMFlavors()
4000
sm_flavor.update(values)
4005
@require_admin_context
4006
def sm_flavor_update(context, sm_flavor_label, values):
4007
sm_flavor = sm_flavor_get(context, sm_flavor_label)
4008
sm_flavor.update(values)
4013
@require_admin_context
def sm_flavor_delete(context, sm_flavor_label):
    """Hard-delete the SM flavor rows matching the given label.

    The lookup query is bound to the local session so the bulk delete
    executes inside the `session.begin()` transaction — matching the
    sibling sm_volume_delete(), which already passes session=session.
    """
    session = get_session()
    with session.begin():
        _sm_flavor_get_query(context, sm_flavor_label,
                             session=session).delete()
4020
@require_admin_context
4021
def sm_flavor_get(context, sm_flavor_label):
4022
result = _sm_flavor_get_query(context, sm_flavor_label).first()
4025
raise exception.NotFound(
4026
_("No sm_flavor called %(sm_flavor)s") % locals())
4031
@require_admin_context
def sm_flavor_get_all(context):
    """Return every SM flavor record, including soft-deleted ones."""
    query = model_query(context, models.SMFlavors, read_deleted="yes")
    return query.all()
4036
###############################
4039
def _sm_volume_get_query(context, volume_id, session=None):
    """Build a query for the SM volume with the given id (deleted included)."""
    query = model_query(context, models.SMVolume,
                        session=session, read_deleted="yes")
    return query.filter_by(id=volume_id)
4045
def sm_volume_create(context, values):
4046
sm_volume = models.SMVolume()
4047
sm_volume.update(values)
4052
def sm_volume_update(context, volume_id, values):
4053
sm_volume = sm_volume_get(context, volume_id)
4054
sm_volume.update(values)
4059
def sm_volume_delete(context, volume_id):
    """Hard-delete the SM volume with the given id inside a transaction."""
    session = get_session()
    with session.begin():
        # Bind the lookup to this session so the bulk delete is part of
        # the transaction opened above.
        query = _sm_volume_get_query(context, volume_id, session=session)
        query.delete()
4065
def sm_volume_get(context, volume_id):
4066
result = _sm_volume_get_query(context, volume_id).first()
4069
raise exception.NotFound(
4070
_("No sm_volume with id %(volume_id)s") % locals())
4075
def sm_volume_get_all(context):
    """Return every SM volume record, including soft-deleted ones."""
    query = model_query(context, models.SMVolume, read_deleted="yes")
    return query.all()
4082
def _aggregate_get_query(context, model_class, id_field, id,
                         session=None, read_deleted='yes'):
    """Build a query over model_class filtered on id_field == id."""
    query = model_query(context, model_class, session=session,
                        read_deleted=read_deleted)
    return query.filter(id_field == id)
4088
@require_admin_context
4089
def aggregate_create(context, values, metadata=None):
4090
session = get_session()
4091
aggregate = _aggregate_get_query(context,
4093
models.Aggregate.name,
4096
read_deleted='yes').first()
4097
values.setdefault('operational_state', aggregate_states.CREATED)
4099
aggregate = models.Aggregate()
4100
aggregate.update(values)
4101
aggregate.save(session=session)
4102
elif aggregate.deleted:
4103
values['deleted'] = False
4104
values['deleted_at'] = None
4105
aggregate.update(values)
4106
aggregate.save(session=session)
4108
raise exception.AggregateNameExists(aggregate_name=values['name'])
4110
aggregate_metadata_add(context, aggregate.id, metadata)
4114
@require_admin_context
4115
def aggregate_get(context, aggregate_id, read_deleted='no'):
4116
aggregate = _aggregate_get_query(context,
4118
models.Aggregate.id, aggregate_id,
4119
read_deleted=read_deleted).first()
4122
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
4127
@require_admin_context
4128
def aggregate_get_by_host(context, host, read_deleted='no'):
4129
aggregate_host = _aggregate_get_query(context,
4130
models.AggregateHost,
4131
models.AggregateHost.host,
4133
read_deleted='no').first()
4135
if not aggregate_host:
4136
raise exception.AggregateHostNotFound(host=host)
4138
return aggregate_get(context, aggregate_host.aggregate_id, read_deleted)
4141
@require_admin_context
4142
def aggregate_update(context, aggregate_id, values):
4143
session = get_session()
4144
aggregate = _aggregate_get_query(context,
4146
models.Aggregate.id, aggregate_id,
4148
read_deleted='no').first()
4150
metadata = values.get('metadata')
4151
if metadata is not None:
4152
aggregate_metadata_add(context,
4154
values.pop('metadata'),
4156
with session.begin():
4157
aggregate.update(values)
4158
aggregate.save(session=session)
4159
values['metadata'] = metadata
4162
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
4165
@require_admin_context
4166
def aggregate_delete(context, aggregate_id):
4167
query = _aggregate_get_query(context,
4169
models.Aggregate.id, aggregate_id,
4172
query.update({'deleted': True,
4173
'deleted_at': utils.utcnow(),
4174
'operational_state': aggregate_states.DISMISSED,
4175
'updated_at': literal_column('updated_at')})
4177
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
4180
@require_admin_context
4181
def aggregate_get_all(context, read_deleted='yes'):
4182
return model_query(context,
4184
read_deleted=read_deleted).all()
4187
@require_admin_context
@require_aggregate_exists
def aggregate_metadata_get(context, aggregate_id, read_deleted='no'):
    """Return the metadata of an aggregate as a {key: value} dict."""
    query = model_query(context, models.AggregateMetadata,
                        read_deleted=read_deleted)
    rows = query.filter_by(aggregate_id=aggregate_id).all()
    # dict() over a generator rather than a list: same result, no
    # intermediate list (file targets Python 2.6, so no dict comprehension).
    return dict((row['key'], row['value']) for row in rows)
4198
@require_admin_context
4199
@require_aggregate_exists
4200
def aggregate_metadata_delete(context, aggregate_id, key):
4201
query = _aggregate_get_query(context,
4202
models.AggregateMetadata,
4203
models.AggregateMetadata.aggregate_id,
4204
aggregate_id, read_deleted='no').\
4207
query.update({'deleted': True,
4208
'deleted_at': utils.utcnow(),
4209
'updated_at': literal_column('updated_at')})
4211
raise exception.AggregateMetadataNotFound(aggregate_id=aggregate_id,
4215
@require_admin_context
4216
@require_aggregate_exists
4217
def aggregate_metadata_get_item(context, aggregate_id, key,
4218
session=None, read_deleted='yes'):
4219
result = _aggregate_get_query(context,
4220
models.AggregateMetadata,
4221
models.AggregateMetadata.aggregate_id,
4222
aggregate_id, session=session,
4223
read_deleted=read_deleted).\
4224
filter_by(key=key).first()
4227
raise exception.AggregateMetadataNotFound(metadata_key=key,
4228
aggregate_id=aggregate_id)
4233
@require_admin_context
4234
@require_aggregate_exists
4235
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
4236
session = get_session()
4239
original_metadata = aggregate_metadata_get(context, aggregate_id)
4240
for meta_key, meta_value in original_metadata.iteritems():
4241
if meta_key not in metadata:
4242
meta_ref = aggregate_metadata_get_item(context, aggregate_id,
4244
meta_ref.update({'deleted': True})
4245
meta_ref.save(session=session)
4249
for meta_key, meta_value in metadata.iteritems():
4250
item = {"value": meta_value}
4252
meta_ref = aggregate_metadata_get_item(context, aggregate_id,
4254
if meta_ref.deleted:
4255
item.update({'deleted': False, 'deleted_at': None})
4256
except exception.AggregateMetadataNotFound:
4257
meta_ref = models.AggregateMetadata()
4258
item.update({"key": meta_key, "aggregate_id": aggregate_id})
4260
meta_ref.update(item)
4261
meta_ref.save(session=session)
4266
@require_admin_context
@require_aggregate_exists
def aggregate_host_get_all(context, aggregate_id, read_deleted='yes'):
    """Return the list of host names that belong to an aggregate."""
    query = model_query(context, models.AggregateHost,
                        read_deleted=read_deleted)
    matches = query.filter_by(aggregate_id=aggregate_id).all()
    return [entry.host for entry in matches]
4277
@require_admin_context
4278
@require_aggregate_exists
4279
def aggregate_host_delete(context, aggregate_id, host):
4280
query = _aggregate_get_query(context,
4281
models.AggregateHost,
4282
models.AggregateHost.aggregate_id,
4284
read_deleted='no').filter_by(host=host)
4286
query.update({'deleted': True,
4287
'deleted_at': utils.utcnow(),
4288
'updated_at': literal_column('updated_at')})
4290
raise exception.AggregateHostNotFound(aggregate_id=aggregate_id,
4294
@require_admin_context
4295
@require_aggregate_exists
4296
def aggregate_host_add(context, aggregate_id, host):
4297
session = get_session()
4298
host_ref = _aggregate_get_query(context,
4299
models.AggregateHost,
4300
models.AggregateHost.aggregate_id,
4303
read_deleted='yes').\
4304
filter_by(host=host).first()
4307
host_ref = models.AggregateHost()
4308
values = {"host": host, "aggregate_id": aggregate_id, }
4309
host_ref.update(values)
4310
host_ref.save(session=session)
4311
except exception.DBError:
4312
raise exception.AggregateHostConflict(host=host)
4313
elif host_ref.deleted:
4314
host_ref.update({'deleted': False, 'deleted_at': None})
4315
host_ref.save(session=session)
4317
raise exception.AggregateHostExists(host=host,
4318
aggregate_id=aggregate_id)
4325
def instance_fault_create(context, values):
4326
"""Create a new InstanceFault."""
4327
fault_ref = models.InstanceFault()
4328
fault_ref.update(values)
4330
return dict(fault_ref.iteritems())
4333
def instance_fault_get_by_instance_uuids(context, instance_uuids):
4334
"""Get all instance faults for the provided instance_uuids."""
4335
rows = model_query(context, models.InstanceFault, read_deleted='no').\
4336
filter(models.InstanceFault.instance_uuid.in_(
4338
order_by(desc("created_at")).\
4342
for instance_uuid in instance_uuids:
4343
output[instance_uuid] = []
4346
data = dict(row.iteritems())
4347
output[row['instance_uuid']].append(data)