~ubuntu-branches/ubuntu/vivid/ceilometer/vivid

« back to all changes in this revision

Viewing changes to ceilometer/api/controllers/v2.py

  • Committer: Package Import Robot
  • Author(s): Chuck Short
  • Date: 2014-03-06 14:44:28 UTC
  • mto: (28.1.1 utopic-proposed) (1.2.1)
  • mto: This revision was merged to the branch mainline in revision 19.
  • Revision ID: package-import@ubuntu.com-20140306144428-rvphsh4igwyulzf0
Tags: upstream-2014.1~b3
Import upstream version 2014.1~b3

Show diffs side-by-side

added added

removed removed

Lines of Context:
3
3
# Copyright © 2012 New Dream Network, LLC (DreamHost)
4
4
# Copyright 2013 IBM Corp.
5
5
# Copyright © 2013 eNovance <licensing@enovance.com>
 
6
# Copyright Ericsson AB 2013. All rights reserved
6
7
#
7
8
# Authors: Doug Hellmann <doug.hellmann@dreamhost.com>
8
9
#          Angus Salkeld <asalkeld@redhat.com>
9
10
#          Eoghan Glynn <eglynn@redhat.com>
10
11
#          Julien Danjou <julien@danjou.info>
 
12
#          Ildiko Vancsa <ildiko.vancsa@ericsson.com>
 
13
#          Balazs Gibizer <balazs.gibizer@ericsson.com>
11
14
#
12
15
# Licensed under the Apache License, Version 2.0 (the "License"); you may
13
16
# not use this file except in compliance with the License. You may obtain
25
28
import ast
26
29
import base64
27
30
import copy
 
31
import croniter
28
32
import datetime
29
33
import functools
30
34
import inspect
31
35
import json
 
36
import jsonschema
 
37
import pytz
32
38
import uuid
33
39
 
34
40
from oslo.config import cfg
57
63
ALARM_API_OPTS = [
58
64
    cfg.BoolOpt('record_history',
59
65
                default=True,
60
 
                help='Record alarm change events'
 
66
                help='Record alarm change events.'
61
67
                ),
62
68
]
63
69
 
82
88
            status_code=404)
83
89
 
84
90
 
85
 
class BoundedInt(wtypes.UserType):
86
 
    basetype = int
87
 
 
88
 
    def __init__(self, min=None, max=None):
89
 
        self.min = min
90
 
        self.max = max
91
 
 
92
 
    @property
93
 
    def name(self):
94
 
        if self.min is not None and self.max is not None:
95
 
            return 'int between %d and %d' % (self.min, self.max)
96
 
        elif self.min is not None:
97
 
            return 'int greater than %d' % self.min
98
 
        else:
99
 
            return 'int lower than %d' % self.max
100
 
 
101
 
    @staticmethod
102
 
    def frombasetype(value):
103
 
        return int(value) if value is not None else None
104
 
 
105
 
    def validate(self, value):
106
 
        if self.min is not None and value < self.min:
107
 
            error = _('Value %(value)s is invalid (should be greater or equal '
108
 
                      'to %(min)s)') % dict(value=value, min=self.min)
109
 
            raise ClientSideError(error)
110
 
 
111
 
        if self.max is not None and value > self.max:
112
 
            error = _('Value %(value)s is invalid (should be lower or equal '
113
 
                      'to %(max)s)') % dict(value=value, max=self.max)
114
 
            raise ClientSideError(error)
115
 
        return value
116
 
 
117
 
 
118
91
class AdvEnum(wtypes.wsproperty):
119
92
    """Handle default and mandatory for wtypes.Enum
120
93
    """
137
110
            setattr(parent, self._name, value)
138
111
 
139
112
 
 
113
class CronType(wtypes.UserType):
 
114
    """A user type that represents a cron format."""
 
115
    basetype = six.string_types
 
116
    name = 'cron'
 
117
 
 
118
    @staticmethod
 
119
    def validate(value):
 
120
        # raises ValueError if invalid
 
121
        croniter.croniter(value)
 
122
        return value
 
123
 
 
124
 
140
125
class _Base(wtypes.Base):
141
126
 
142
127
    @classmethod
189
174
    # Functions to convert the data field to the correct type.
190
175
    _type_converters = {'integer': int,
191
176
                        'float': float,
192
 
                        'boolean': strutils.bool_from_string,
 
177
                        'boolean': functools.partial(
 
178
                            strutils.bool_from_string, strict=True),
193
179
                        'string': six.text_type,
194
180
                        'datetime': timeutils.parse_isotime}
195
181
 
345
331
                raise ProjectNotAuthorized(q.value)
346
332
 
347
333
 
348
 
def _validate_query(query, db_func, internal_keys=[]):
 
334
def _validate_query(query, db_func, internal_keys=[],
 
335
                    is_timestamp_valid=True):
349
336
    _verify_query_segregation(query)
350
337
 
351
338
    valid_keys = inspect.getargspec(db_func)[0]
354
341
    translation = {'user_id': 'user',
355
342
                   'project_id': 'project',
356
343
                   'resource_id': 'resource'}
357
 
    has_timestamp = False
 
344
 
 
345
    has_timestamp_query = _validate_timestamp_fields(query,
 
346
                                                     'timestamp',
 
347
                                                     ('lt', 'le', 'gt', 'ge'),
 
348
                                                     is_timestamp_valid)
 
349
    has_search_offset_query = _validate_timestamp_fields(query,
 
350
                                                         'search_offset',
 
351
                                                         ('eq'),
 
352
                                                         is_timestamp_valid)
 
353
 
 
354
    if has_search_offset_query and not has_timestamp_query:
 
355
        raise wsme.exc.InvalidInput('field', 'search_offset',
 
356
                                    "search_offset cannot be used without " +
 
357
                                    "timestamp")
 
358
 
358
359
    for i in query:
359
 
        if i.field == 'timestamp':
360
 
            has_timestamp = True
361
 
            if i.op not in ('lt', 'le', 'gt', 'ge'):
362
 
                raise wsme.exc.InvalidInput('op', i.op,
363
 
                                            'unimplemented operator for %s' %
364
 
                                            i.field)
365
 
        else:
 
360
        if i.field not in ('timestamp', 'search_offset'):
366
361
            if i.op == 'eq':
367
 
                if i.field == 'search_offset':
368
 
                    has_timestamp = True
369
 
                elif i.field == 'enabled':
 
362
                if i.field == 'enabled':
370
363
                    i._get_value_as_type('boolean')
371
 
                elif i.field.startswith('metadata.'):
372
 
                    i._get_value_as_type()
373
 
                elif i.field.startswith('resource_metadata.'):
 
364
                elif (i.field.startswith('metadata.') or
 
365
                      i.field.startswith('resource_metadata.')):
374
366
                    i._get_value_as_type()
375
367
                else:
376
368
                    key = translation.get(i.field, i.field)
383
375
                                            'unimplemented operator for %s' %
384
376
                                            i.field)
385
377
 
386
 
    if has_timestamp and not ('start' in valid_keys or
387
 
                              'start_timestamp' in valid_keys):
388
 
        raise wsme.exc.UnknownArgument('timestamp',
389
 
                                       "not valid for this resource")
390
 
 
391
 
 
392
 
def _query_to_kwargs(query, db_func, internal_keys=[]):
393
 
    _validate_query(query, db_func, internal_keys=internal_keys)
 
378
 
 
379
def _validate_timestamp_fields(query, field_name, operator_list,
 
380
                               is_timestamp_valid):
 
381
    for item in query:
 
382
        if item.field == field_name:
 
383
            #If *timestamp* or *search_offset* field was specified in the
 
384
            #query, but timestamp is not supported on that resource, on
 
385
            #which the query was invoked, then raise an exception.
 
386
            if not is_timestamp_valid:
 
387
                raise wsme.exc.UnknownArgument(field_name,
 
388
                                               "not valid for " +
 
389
                                               "this resource")
 
390
            if item.op not in operator_list:
 
391
                raise wsme.exc.InvalidInput('op', item.op,
 
392
                                            'unimplemented operator for %s' %
 
393
                                            item.field)
 
394
            return True
 
395
    return False
 
396
 
 
397
 
 
398
def _query_to_kwargs(query, db_func, internal_keys=[],
 
399
                     is_timestamp_valid=True):
 
400
    _validate_query(query, db_func, internal_keys=internal_keys,
 
401
                    is_timestamp_valid=is_timestamp_valid)
394
402
    query = _sanitize_query(query, db_func)
395
403
    internal_keys.append('self')
396
404
    valid_keys = set(inspect.getargspec(db_func)[0]) - set(internal_keys)
505
513
 
506
514
 
507
515
def _flatten_metadata(metadata):
508
 
    """Return flattened resource metadata without nested structures
509
 
    and with all values converted to unicode strings.
 
516
    """Return flattened resource metadata with flattened nested
 
517
    structures (except nested sets) and with all values converted
 
518
    to unicode strings.
510
519
    """
511
520
    if metadata:
512
 
        return dict((k, unicode(v))
 
521
        # After changing recursive_keypairs` output we need to keep
 
522
        # flattening output unchanged.
 
523
        # Example: recursive_keypairs({'a': {'b':{'c':'d'}}}, '.')
 
524
        # output before: a.b:c=d
 
525
        # output now: a.b.c=d
 
526
        # So to keep the first variant just replace all dots except the first
 
527
        return dict((k.replace('.', ':').replace(':', '.', 1), unicode(v))
513
528
                    for k, v in utils.recursive_keypairs(metadata,
514
529
                                                         separator='.')
515
 
                    if type(v) not in set([list, set]))
 
530
                    if type(v) is not set)
516
531
    return {}
517
532
 
518
533
 
568
583
    timestamp = datetime.datetime
569
584
    "UTC date and time when the measurement was made"
570
585
 
 
586
    recorded_at = datetime.datetime
 
587
    "When the sample has been recorded."
 
588
 
571
589
    resource_metadata = {wtypes.text: wtypes.text}
572
590
    "Arbitrary metadata associated with the resource"
573
591
 
600
618
                   resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
601
619
                   project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
602
620
                   user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
 
621
                   recorded_at=datetime.datetime.utcnow(),
603
622
                   timestamp=datetime.datetime.utcnow(),
604
623
                   resource_metadata={'name1': 'value1',
605
624
                                      'name2': 'value2'},
632
651
    count = int
633
652
    "The number of samples seen"
634
653
 
 
654
    aggregate = {wtypes.text: float}
 
655
    "The selectable aggregate value(s)"
 
656
 
635
657
    duration = float
636
658
    "The difference, in seconds, between the oldest and newest timestamp"
637
659
 
676
698
        # "invalid."
677
699
        #
678
700
        # If the timestamps are invalid, return None as a
679
 
        # sentinal indicating that there is something "funny"
 
701
        # sentinel indicating that there is something "funny"
680
702
        # about the range.
681
703
        if (self.duration_start and
682
704
                self.duration_end and
702
724
                   )
703
725
 
704
726
 
 
727
class Aggregate(_Base):
 
728
 
 
729
    func = wsme.wsattr(wtypes.text, mandatory=True)
 
730
    "The aggregation function name"
 
731
 
 
732
    param = wsme.wsattr(wtypes.text, default=None)
 
733
    "The paramter to the aggregation function"
 
734
 
 
735
    def __init__(self, **kwargs):
 
736
        super(Aggregate, self).__init__(**kwargs)
 
737
 
 
738
    @staticmethod
 
739
    def validate(aggregate):
 
740
        return aggregate
 
741
 
 
742
    @classmethod
 
743
    def sample(cls):
 
744
        return cls(func='cardinality',
 
745
                   param='resource_id')
 
746
 
 
747
 
705
748
class MeterController(rest.RestController):
706
749
    """Manages operations on a single meter.
707
750
    """
709
752
        'statistics': ['GET'],
710
753
    }
711
754
 
712
 
    def __init__(self, meter_id):
713
 
        pecan.request.context['meter_id'] = meter_id
714
 
        self._id = meter_id
 
755
    def __init__(self, meter_name):
 
756
        pecan.request.context['meter_name'] = meter_name
 
757
        self.meter_name = meter_name
715
758
 
716
759
    @wsme_pecan.wsexpose([OldSample], [Query], int)
717
760
    def get_all(self, q=[], limit=None):
723
766
        if limit and limit < 0:
724
767
            raise ClientSideError(_("Limit must be positive"))
725
768
        kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__)
726
 
        kwargs['meter'] = self._id
 
769
        kwargs['meter'] = self.meter_name
727
770
        f = storage.SampleFilter(**kwargs)
728
771
        return [OldSample.from_db_model(e)
729
772
                for e in pecan.request.storage_conn.get_samples(f, limit=limit)
731
774
 
732
775
    @wsme_pecan.wsexpose([OldSample], body=[OldSample])
733
776
    def post(self, samples):
734
 
        """Post a list of new Samples to Ceilometer.
 
777
        """Post a list of new Samples to Telemetry.
735
778
 
736
779
        :param samples: a list of samples within the request body.
737
780
        """
743
786
 
744
787
        published_samples = []
745
788
        for s in samples:
746
 
            if self._id != s.counter_name:
 
789
            if self.meter_name != s.counter_name:
747
790
                raise wsme.exc.InvalidInput('counter_name', s.counter_name,
748
 
                                            'should be %s' % self._id)
 
791
                                            'should be %s' % self.meter_name)
749
792
 
750
793
            if s.message_id:
751
794
                raise wsme.exc.InvalidInput('message_id', s.message_id,
788
831
 
789
832
        return samples
790
833
 
791
 
    @wsme_pecan.wsexpose([Statistics], [Query], [unicode], int)
792
 
    def statistics(self, q=[], groupby=[], period=None):
 
834
    @wsme_pecan.wsexpose([Statistics], [Query], [unicode], int, [Aggregate])
 
835
    def statistics(self, q=[], groupby=[], period=None, aggregate=[]):
793
836
        """Computes the statistics of the samples in the time range given.
794
837
 
795
838
        :param q: Filter rules for the data to be returned.
796
839
        :param groupby: Fields for group by aggregation
797
840
        :param period: Returned result will be an array of statistics for a
798
841
                       period long of that number of seconds.
 
842
        :param aggregate: The selectable aggregation functions to be applied.
799
843
        """
800
844
        if period and period < 0:
801
845
            raise ClientSideError(_("Period must be positive."))
802
846
 
803
847
        kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__)
804
 
        kwargs['meter'] = self._id
 
848
        kwargs['meter'] = self.meter_name
805
849
        f = storage.SampleFilter(**kwargs)
806
850
        g = _validate_groupby_fields(groupby)
807
851
        computed = pecan.request.storage_conn.get_meter_statistics(f,
808
852
                                                                   period,
809
 
                                                                   g)
 
853
                                                                   g,
 
854
                                                                   aggregate)
810
855
        LOG.debug(_('computed value coming from %r'),
811
856
                  pecan.request.storage_conn)
812
857
        # Find the original timestamp in the query to use for clamping
874
919
    """Works on meters."""
875
920
 
876
921
    @pecan.expose()
877
 
    def _lookup(self, meter_id, *remainder):
878
 
        # NOTE(gordc): drop last path if empty (Bug #1202739)
879
 
        if remainder and not remainder[-1]:
880
 
            remainder = remainder[:-1]
881
 
        return MeterController(meter_id), remainder
 
922
    def _lookup(self, meter_name, *remainder):
 
923
        return MeterController(meter_name), remainder
882
924
 
883
925
    @wsme_pecan.wsexpose([Meter], [Query])
884
926
    def get_all(self, q=[]):
886
928
 
887
929
        :param q: Filter rules for the meters to be returned.
888
930
        """
889
 
        kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters)
 
931
        #Timestamp field is not supported for Meter queries
 
932
        kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters,
 
933
                                  is_timestamp_valid=False)
890
934
        return [Meter.from_db_model(m)
891
935
                for m in pecan.request.storage_conn.get_meters(**kwargs)]
892
936
 
924
968
    timestamp = datetime.datetime
925
969
    "When the sample has been generated."
926
970
 
 
971
    recorded_at = datetime.datetime
 
972
    "When the sample has been recorded."
 
973
 
927
974
    metadata = {wtypes.text: wtypes.text}
928
975
    "Arbitrary metadata associated with the sample."
929
976
 
937
984
                   user_id=m.user_id,
938
985
                   project_id=m.project_id,
939
986
                   resource_id=m.resource_id,
 
987
                   source=m.source,
940
988
                   timestamp=m.timestamp,
 
989
                   recorded_at=m.recorded_at,
941
990
                   metadata=_flatten_metadata(m.resource_metadata))
942
991
 
943
992
    @classmethod
946
995
                   meter='instance',
947
996
                   type='gauge',
948
997
                   unit='instance',
 
998
                   volume=1,
949
999
                   resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
950
1000
                   project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
951
1001
                   user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
952
1002
                   timestamp=timeutils.utcnow(),
 
1003
                   recorded_at=datetime.datetime.utcnow(),
953
1004
                   source='openstack',
954
1005
                   metadata={'name1': 'value1',
955
1006
                             'name2': 'value2'},
988
1039
        return Sample.from_db_model(samples[0])
989
1040
 
990
1041
 
 
1042
class ComplexQuery(_Base):
 
1043
    """Holds a sample query encoded in json."""
 
1044
 
 
1045
    filter = wtypes.text
 
1046
    "The filter expression encoded in json."
 
1047
 
 
1048
    orderby = wtypes.text
 
1049
    "List of single-element dicts for specifing the ordering of the results."
 
1050
 
 
1051
    limit = int
 
1052
    "The maximum number of results to be returned."
 
1053
 
 
1054
    @classmethod
 
1055
    def sample(cls):
 
1056
        return cls(filter='{\"and\": [{\"and\": [{\"=\": ' +
 
1057
                          '{\"counter_name\": \"cpu_util\"}}, ' +
 
1058
                          '{\">\": {\"counter_volume\": 0.23}}, ' +
 
1059
                          '{\"<\": {\"counter_volume\": 0.26}}]}, ' +
 
1060
                          '{\"or\": [{\"and\": [{\">\": ' +
 
1061
                          '{\"timestamp\": \"2013-12-01T18:00:00\"}}, ' +
 
1062
                          '{\"<\": ' +
 
1063
                          '{\"timestamp\": \"2013-12-01T18:15:00\"}}]}, ' +
 
1064
                          '{\"and\": [{\">\": ' +
 
1065
                          '{\"timestamp\": \"2013-12-01T18:30:00\"}}, ' +
 
1066
                          '{\"<\": ' +
 
1067
                          '{\"timestamp\": \"2013-12-01T18:45:00\"}}]}]}]}',
 
1068
                   orderby='[{\"counter_volume\": \"ASC\"}, ' +
 
1069
                           '{\"timestamp\": \"DESC\"}]',
 
1070
                   limit=42
 
1071
                   )
 
1072
 
 
1073
 
 
1074
def _list_to_regexp(items, regexp_prefix=""):
 
1075
    regexp = ["^%s$" % item for item in items]
 
1076
    regexp = regexp_prefix + "|".join(regexp)
 
1077
    return regexp
 
1078
 
 
1079
 
 
1080
class ValidatedComplexQuery(object):
 
1081
    complex_operators = ["and", "or"]
 
1082
    order_directions = ["asc", "desc"]
 
1083
    simple_ops = ["=", "!=", "<", ">", "<=", "=<", ">=", "=>"]
 
1084
    regexp_prefix = "(?i)"
 
1085
 
 
1086
    complex_ops = _list_to_regexp(complex_operators, regexp_prefix)
 
1087
    simple_ops = _list_to_regexp(simple_ops, regexp_prefix)
 
1088
    order_directions = _list_to_regexp(order_directions, regexp_prefix)
 
1089
 
 
1090
    timestamp_fields = ["timestamp", "state_timestamp"]
 
1091
    name_mapping = {"user": "user_id",
 
1092
                    "project": "project_id",
 
1093
                    "resource": "resource_id"}
 
1094
 
 
1095
    def __init__(self, query, db_model, additional_valid_keys,
 
1096
                 metadata_allowed=False):
 
1097
        valid_keys = db_model.get_field_names()
 
1098
        valid_keys = list(valid_keys) + additional_valid_keys
 
1099
        valid_fields = _list_to_regexp(valid_keys)
 
1100
 
 
1101
        if metadata_allowed:
 
1102
            valid_filter_fields = valid_fields + "|^metadata\.[\S]+$"
 
1103
        else:
 
1104
            valid_filter_fields = valid_fields
 
1105
 
 
1106
        schema_value = {
 
1107
            "oneOf": [{"type": "string"},
 
1108
                      {"type": "number"},
 
1109
                      {"type": "boolean"}],
 
1110
            "minProperties": 1,
 
1111
            "maxProperties": 1}
 
1112
 
 
1113
        schema_value_in = {
 
1114
            "type": "array",
 
1115
            "items": {"oneOf": [{"type": "string"},
 
1116
                                {"type": "number"}]}}
 
1117
 
 
1118
        schema_field = {
 
1119
            "type": "object",
 
1120
            "patternProperties": {valid_filter_fields: schema_value},
 
1121
            "additionalProperties": False,
 
1122
            "minProperties": 1,
 
1123
            "maxProperties": 1}
 
1124
 
 
1125
        schema_field_in = {
 
1126
            "type": "object",
 
1127
            "patternProperties": {valid_filter_fields: schema_value_in},
 
1128
            "additionalProperties": False,
 
1129
            "minProperties": 1,
 
1130
            "maxProperties": 1}
 
1131
 
 
1132
        schema_leaf_in = {
 
1133
            "type": "object",
 
1134
            "patternProperties": {"(?i)^in$": schema_field_in},
 
1135
            "additionalProperties": False,
 
1136
            "minProperties": 1,
 
1137
            "maxProperties": 1}
 
1138
 
 
1139
        schema_leaf_simple_ops = {
 
1140
            "type": "object",
 
1141
            "patternProperties": {self.simple_ops: schema_field},
 
1142
            "additionalProperties": False,
 
1143
            "minProperties": 1,
 
1144
            "maxProperties": 1}
 
1145
 
 
1146
        schema_and_or_array = {
 
1147
            "type": "array",
 
1148
            "items": {"$ref": "#"},
 
1149
            "minItems": 2}
 
1150
 
 
1151
        schema_and_or = {
 
1152
            "type": "object",
 
1153
            "patternProperties": {self.complex_ops: schema_and_or_array},
 
1154
            "additionalProperties": False,
 
1155
            "minProperties": 1,
 
1156
            "maxProperties": 1}
 
1157
 
 
1158
        schema_not = {
 
1159
            "type": "object",
 
1160
            "patternProperties": {"(?i)^not$": {"$ref": "#"}},
 
1161
            "additionalProperties": False,
 
1162
            "minProperties": 1,
 
1163
            "maxProperties": 1}
 
1164
 
 
1165
        self.schema = {
 
1166
            "oneOf": [{"$ref": "#/definitions/leaf_simple_ops"},
 
1167
                      {"$ref": "#/definitions/leaf_in"},
 
1168
                      {"$ref": "#/definitions/and_or"},
 
1169
                      {"$ref": "#/definitions/not"}],
 
1170
            "minProperties": 1,
 
1171
            "maxProperties": 1,
 
1172
            "definitions": {"leaf_simple_ops": schema_leaf_simple_ops,
 
1173
                            "leaf_in": schema_leaf_in,
 
1174
                            "and_or": schema_and_or,
 
1175
                            "not": schema_not}}
 
1176
 
 
1177
        self.orderby_schema = {
 
1178
            "type": "array",
 
1179
            "items": {
 
1180
                "type": "object",
 
1181
                "patternProperties":
 
1182
                    {valid_fields:
 
1183
                        {"type": "string",
 
1184
                         "pattern": self.order_directions}},
 
1185
                "additionalProperties": False,
 
1186
                "minProperties": 1,
 
1187
                "maxProperties": 1}}
 
1188
 
 
1189
        self.original_query = query
 
1190
 
 
1191
    def validate(self, visibility_field):
 
1192
        """Validates the query content and does the necessary transformations.
 
1193
        """
 
1194
        if self.original_query.filter is wtypes.Unset:
 
1195
            self.filter_expr = None
 
1196
        else:
 
1197
            self.filter_expr = json.loads(self.original_query.filter)
 
1198
            self._validate_filter(self.filter_expr)
 
1199
            self._replace_isotime_with_datetime(self.filter_expr)
 
1200
            self._convert_operator_to_lower_case(self.filter_expr)
 
1201
            self._normalize_field_names_for_db_model(self.filter_expr)
 
1202
 
 
1203
        self._force_visibility(visibility_field)
 
1204
 
 
1205
        if self.original_query.orderby is wtypes.Unset:
 
1206
            self.orderby = None
 
1207
        else:
 
1208
            self.orderby = json.loads(self.original_query.orderby)
 
1209
            self._validate_orderby(self.orderby)
 
1210
            self._convert_orderby_to_lower_case(self.orderby)
 
1211
            self._normalize_field_names_in_orderby(self.orderby)
 
1212
 
 
1213
        if self.original_query.limit is wtypes.Unset:
 
1214
            self.limit = None
 
1215
        else:
 
1216
            self.limit = self.original_query.limit
 
1217
 
 
1218
        if self.limit is not None and self.limit <= 0:
 
1219
            msg = _('Limit should be positive')
 
1220
            raise ClientSideError(msg)
 
1221
 
 
1222
    @staticmethod
 
1223
    def _convert_orderby_to_lower_case(orderby):
 
1224
        for orderby_field in orderby:
 
1225
            utils.lowercase_values(orderby_field)
 
1226
 
 
1227
    def _normalize_field_names_in_orderby(self, orderby):
 
1228
        for orderby_field in orderby:
 
1229
            self._replace_field_names(orderby_field)
 
1230
 
 
1231
    def _traverse_postorder(self, tree, visitor):
 
1232
        op = tree.keys()[0]
 
1233
        if op.lower() in self.complex_operators:
 
1234
            for i, operand in enumerate(tree[op]):
 
1235
                self._traverse_postorder(operand, visitor)
 
1236
        if op.lower() == "not":
 
1237
            self._traverse_postorder(tree[op], visitor)
 
1238
 
 
1239
        visitor(tree)
 
1240
 
 
1241
    def _check_cross_project_references(self, own_project_id,
 
1242
                                        visibility_field):
 
1243
        """Do not allow other than own_project_id
 
1244
        """
 
1245
        def check_project_id(subfilter):
 
1246
            op = subfilter.keys()[0]
 
1247
            if (op.lower() not in self.complex_operators
 
1248
                    and subfilter[op].keys()[0] == visibility_field
 
1249
                    and subfilter[op][visibility_field] != own_project_id):
 
1250
                raise ProjectNotAuthorized(subfilter[op][visibility_field])
 
1251
 
 
1252
        self._traverse_postorder(self.filter_expr, check_project_id)
 
1253
 
 
1254
    def _force_visibility(self, visibility_field):
 
1255
        """If the tenant is not admin insert an extra
 
1256
        "and <visibility_field>=<tenant's project_id>" clause to the query
 
1257
        """
 
1258
        authorized_project = acl.get_limited_to_project(pecan.request.headers)
 
1259
        is_admin = authorized_project is None
 
1260
        if not is_admin:
 
1261
            self._restrict_to_project(authorized_project, visibility_field)
 
1262
            self._check_cross_project_references(authorized_project,
 
1263
                                                 visibility_field)
 
1264
 
 
1265
    def _restrict_to_project(self, project_id, visibility_field):
 
1266
        restriction = {"=": {visibility_field: project_id}}
 
1267
        if self.filter_expr is None:
 
1268
            self.filter_expr = restriction
 
1269
        else:
 
1270
            self.filter_expr = {"and": [restriction, self.filter_expr]}
 
1271
 
 
1272
    def _replace_isotime_with_datetime(self, filter_expr):
 
1273
        def replace_isotime(subfilter):
 
1274
            op = subfilter.keys()[0]
 
1275
            if (op.lower() not in self.complex_operators
 
1276
                    and subfilter[op].keys()[0] in self.timestamp_fields):
 
1277
                field = subfilter[op].keys()[0]
 
1278
                date_time = self._convert_to_datetime(subfilter[op][field])
 
1279
                subfilter[op][field] = date_time
 
1280
 
 
1281
        self._traverse_postorder(filter_expr, replace_isotime)
 
1282
 
 
1283
    def _normalize_field_names_for_db_model(self, filter_expr):
 
1284
        def _normalize_field_names(subfilter):
 
1285
            op = subfilter.keys()[0]
 
1286
            if op.lower() not in self.complex_operators:
 
1287
                self._replace_field_names(subfilter.values()[0])
 
1288
        self._traverse_postorder(filter_expr,
 
1289
                                 _normalize_field_names)
 
1290
 
 
1291
    def _replace_field_names(self, subfilter):
 
1292
        field = subfilter.keys()[0]
 
1293
        value = subfilter[field]
 
1294
        if field in self.name_mapping:
 
1295
            del subfilter[field]
 
1296
            subfilter[self.name_mapping[field]] = value
 
1297
        if field.startswith("metadata."):
 
1298
            del subfilter[field]
 
1299
            subfilter["resource_" + field] = value
 
1300
 
 
1301
    def _convert_operator_to_lower_case(self, filter_expr):
 
1302
        self._traverse_postorder(filter_expr, utils.lowercase_keys)
 
1303
 
 
1304
    @staticmethod
 
1305
    def _convert_to_datetime(isotime):
 
1306
        try:
 
1307
            date_time = timeutils.parse_isotime(isotime)
 
1308
            date_time = date_time.replace(tzinfo=None)
 
1309
            return date_time
 
1310
        except ValueError:
 
1311
            LOG.exception(_("String %s is not a valid isotime") % isotime)
 
1312
            msg = _('Failed to parse the timestamp value %s') % isotime
 
1313
            raise ClientSideError(msg)
 
1314
 
 
1315
    def _validate_filter(self, filter_expr):
 
1316
        jsonschema.validate(filter_expr, self.schema)
 
1317
 
 
1318
    def _validate_orderby(self, orderby_expr):
 
1319
        jsonschema.validate(orderby_expr, self.orderby_schema)
 
1320
 
 
1321
 
991
1322
class Resource(_Base):
992
1323
    """An externally defined object for which samples have been received.
993
1324
    """
1092
1423
    Ownership settings are automatically included based on the Alarm owner.
1093
1424
    """
1094
1425
 
1095
 
    period = wsme.wsattr(BoundedInt(min=1), default=60)
 
1426
    period = wsme.wsattr(wtypes.IntegerType(minimum=1), default=60)
1096
1427
    "The time range in seconds over which query"
1097
1428
 
1098
1429
    comparison_operator = AdvEnum('comparison_operator', str,
1107
1438
                        'count', default='avg')
1108
1439
    "The statistic to compare to the threshold"
1109
1440
 
1110
 
    evaluation_periods = wsme.wsattr(BoundedInt(min=1), default=1)
 
1441
    evaluation_periods = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
1111
1442
    "The number of historical periods to evaluate the threshold"
1112
1443
 
 
1444
    exclude_outliers = wsme.wsattr(bool, default=False)
 
1445
    "Whether datapoints with anomolously low sample counts are excluded"
 
1446
 
1113
1447
    def __init__(self, query=None, **kwargs):
1114
1448
        if query:
1115
1449
            query = [Query(**q) for q in query]
1122
1456
        if not threshold_rule.query:
1123
1457
            threshold_rule.query = []
1124
1458
 
1125
 
        timestamp_keys = ['timestamp', 'start', 'start_timestamp' 'end',
1126
 
                          'end_timestamp']
 
1459
        #Timestamp is not allowed for AlarmThresholdRule query, as the alarm
 
1460
        #evaluator will construct timestamp bounds for the sequence of
 
1461
        #statistics queries as the sliding evaluation window advances
 
1462
        #over time.
1127
1463
        _validate_query(threshold_rule.query, storage.SampleFilter.__init__,
1128
 
                        internal_keys=timestamp_keys)
 
1464
                        is_timestamp_valid=False)
1129
1465
        return threshold_rule
1130
1466
 
1131
1467
    @property
1142
1478
    def as_dict(self):
        """Return this threshold rule as a plain dict (for storage/JSON)."""
        rule = self.as_dict_from_keys(['period', 'comparison_operator',
                                       'threshold', 'statistic',
                                       'evaluation_periods', 'meter_name',
                                       'exclude_outliers'])
        # Queries are nested objects and need their own dict conversion.
        rule['query'] = [q.as_dict() for q in self.query]
        return rule
1148
1485
 
1169
1506
 
1170
1507
    @property
1171
1508
    def default_description(self):
1172
 
        return _('Combined state of alarms %s') % self.operator.join(
1173
 
            self.alarm_ids)
 
1509
        joiner = ' %s ' % self.operator
 
1510
        return _('Combined state of alarms %s') % joiner.join(self.alarm_ids)
1174
1511
 
1175
1512
    def as_dict(self):
        """Return the combination rule's fields as a plain dict."""
        return self.as_dict_from_keys(['operator', 'alarm_ids'])
1182
1519
                              '153462d0-a9b8-4b5b-8175-9e4b05e9b856'])
1183
1520
 
1184
1521
 
 
1522
class AlarmTimeConstraint(_Base):
    """Representation of a time constraint on an alarm."""

    name = wsme.wsattr(wtypes.text, mandatory=True)
    "The name of the constraint"

    _description = None  # provide a default

    def get_description(self):
        # Fall back to a generated description when none was supplied.
        if not self._description:
            return 'Time constraint at %s lasting for %s seconds' \
                   % (self.start, self.duration)
        return self._description

    def set_description(self, value):
        self._description = value

    description = wsme.wsproperty(wtypes.text, get_description,
                                  set_description)
    "The description of the constraint"

    start = wsme.wsattr(CronType(), mandatory=True)
    "Start point of the time constraint, in cron format"

    duration = wsme.wsattr(wtypes.IntegerType(minimum=0), mandatory=True)
    "How long the constraint should last, in seconds"

    timezone = wsme.wsattr(wtypes.text, default="")
    "Timezone of the constraint"

    def as_dict(self):
        # Serialize only the externally visible fields.
        return self.as_dict_from_keys(['name', 'description', 'start',
                                       'duration', 'timezone'])

    @staticmethod
    def validate(tc):
        # An empty timezone is accepted as-is; a non-empty one must be a
        # name pytz recognizes, otherwise reject as a client error.
        if tc.timezone:
            try:
                pytz.timezone(tc.timezone)
            except Exception:
                raise ClientSideError(_("Timezone %s is not valid")
                                      % tc.timezone)
        return tc

    @classmethod
    def sample(cls):
        # Return an example constraint instance.
        return cls(name='SampleConstraint',
                   description='nightly build every night at 23h for 3 hours',
                   start='0 23 * * *',
                   duration=10800,
                   timezone='Europe/Ljubljana')
 
1573
 
 
1574
 
1185
1575
class Alarm(_Base):
1186
1576
    """Representation of an alarm.
1187
1577
 
1188
1578
    .. note::
1189
 
        combination_rule and threshold_rule are mutually exclusive.
 
1579
        combination_rule and threshold_rule are mutually exclusive. The *type*
 
1580
        of the alarm should be set to *threshold* or *combination* and the
 
1581
        appropriate rule should be filled.
1190
1582
    """
1191
1583
 
1192
1584
    alarm_id = wtypes.text
1235
1627
    """Describe when to trigger the alarm based on combining the state of
1236
1628
    other alarms"""
1237
1629
 
 
1630
    time_constraints = wtypes.wsattr([AlarmTimeConstraint], default=[])
 
1631
    """Describe time constraints for the alarm"""
 
1632
 
1238
1633
    # These settings are ignored in the PUT or POST operations, but are
1239
1634
    # filled in for GET
1240
1635
    project_id = wtypes.text
1253
1648
    state_timestamp = datetime.datetime
1254
1649
    "The date of the last alarm state changed"
1255
1650
 
1256
 
    def __init__(self, rule=None, **kwargs):
 
1651
    def __init__(self, rule=None, time_constraints=None, **kwargs):
1257
1652
        super(Alarm, self).__init__(**kwargs)
1258
1653
 
1259
1654
        if rule:
1261
1656
                self.threshold_rule = AlarmThresholdRule(**rule)
1262
1657
            elif self.type == 'combination':
1263
1658
                self.combination_rule = AlarmCombinationRule(**rule)
 
1659
        if time_constraints:
 
1660
            self.time_constraints = [AlarmTimeConstraint(**tc)
 
1661
                                     for tc in time_constraints]
1264
1662
 
1265
1663
    @staticmethod
1266
1664
    def validate(alarm):
1291
1689
                alarms = list(pecan.request.storage_conn.get_alarms(
1292
1690
                    alarm_id=id, project=project))
1293
1691
                if not alarms:
1294
 
                    raise ClientSideError(_("Alarm %s doesn't exist") % id)
 
1692
                    raise EntityNotFound(_('Alarm'), id)
1295
1693
 
1296
1694
        return alarm
1297
1695
 
1300
1698
        return cls(alarm_id=None,
1301
1699
                   name="SwiftObjectAlarm",
1302
1700
                   description="An alarm",
1303
 
                   type='threshold',
 
1701
                   type='combination',
1304
1702
                   threshold_rule=None,
1305
 
                   combination_rule=None,
 
1703
                   combination_rule=AlarmCombinationRule.sample(),
 
1704
                   time_constraints=[AlarmTimeConstraint.sample().as_dict()],
1306
1705
                   user_id="c96c887c216949acbdfbd8b494863567",
1307
1706
                   project_id="c96c887c216949acbdfbd8b494863567",
1308
1707
                   enabled=True,
1321
1720
            if k.endswith('_rule'):
1322
1721
                del d[k]
1323
1722
        d['rule'] = getattr(self, "%s_rule" % self.type).as_dict()
 
1723
        d['time_constraints'] = [tc.as_dict() for tc in self.time_constraints]
1324
1724
        return d
1325
1725
 
1326
1726
 
1427
1827
    def put(self, data):
1428
1828
        """Modify this alarm.
1429
1829
 
1430
 
        :param data: a alarm within the request body.
 
1830
        :param data: an alarm within the request body.
1431
1831
        """
1432
1832
        # Ensure alarm exists
1433
1833
        alarm_in = self._alarm()
1501
1901
    def put_state(self, state):
1502
1902
        """Set the state of this alarm.
1503
1903
 
1504
 
        :param state: a alarm state within the request body.
 
1904
        :param state: an alarm state within the request body.
1505
1905
        """
1506
1906
        # note(sileht): body are not validated by wsme
1507
1907
        # Workaround for https://bugs.launchpad.net/wsme/+bug/1227229
1531
1931
 
1532
1932
    @pecan.expose()
    def _lookup(self, alarm_id, *remainder):
        # Pecan routing hook: delegate /alarms/<alarm_id>/... to a
        # controller bound to that alarm id, passing the remaining path
        # segments through untouched.
        return AlarmController(alarm_id), remainder
1537
1935
 
1538
1936
    def _record_creation(self, conn, data, alarm_id, now):
1565
1963
    def post(self, data):
1566
1964
        """Create a new alarm.
1567
1965
 
1568
 
        :param data: a alarm within the request body.
 
1966
        :param data: an alarm within the request body.
1569
1967
        """
1570
1968
        conn = pecan.request.storage_conn
1571
1969
        now = timeutils.utcnow()
1589
1987
        alarms = list(conn.get_alarms(name=data.name,
1590
1988
                                      project=data.project_id))
1591
1989
        if alarms:
1592
 
            raise ClientSideError(_("Alarm with that name exists"))
 
1990
            raise ClientSideError(
 
1991
                _("Alarm with name='%s' exists") % data.name,
 
1992
                status_code=409)
1593
1993
 
1594
1994
        try:
1595
1995
            alarm_in = storage.models.Alarm(**change)
1607
2007
 
1608
2008
        :param q: Filter rules for the alarms to be returned.
1609
2009
        """
 
2010
        #Timestamp is not supported field for Simple Alarm queries
1610
2011
        kwargs = _query_to_kwargs(q,
1611
 
                                  pecan.request.storage_conn.get_alarms)
 
2012
                                  pecan.request.storage_conn.get_alarms,
 
2013
                                  is_timestamp_valid=False)
1612
2014
        return [Alarm.from_db_model(m)
1613
2015
                for m in pecan.request.storage_conn.get_alarms(**kwargs)]
1614
2016
 
1724
2126
        usr_limit, proj_limit = acl.get_limited_to(pecan.request.headers)
1725
2127
        # If User and Project are None, you have full access.
1726
2128
        if usr_limit and proj_limit:
1727
 
            raise ClientSideError(_("Not Authorized"), status_code=403)
 
2129
            raise ProjectNotAuthorized(proj_limit)
1728
2130
        return func(*args, **kwargs)
1729
2131
 
1730
2132
    return wrapped
1788
2190
 
1789
2191
    traits = TraitsController()
1790
2192
 
1791
 
    # FIXME(herndon): due to a bug in pecan, making this method
1792
 
    # get_all instead of get will hide the traits subcontroller.
1793
 
    # https://bugs.launchpad.net/pecan/+bug/1262277
 
2193
    @pecan.expose()
    def get_one(self, event_type):
        # There is no per-event-type resource; a direct lookup by name
        # always answers 404.
        pecan.abort(404)
 
2196
 
1794
2197
    @requires_admin
    @wsme_pecan.wsexpose([unicode])
    def get_all(self):
        """Get all event types.

        Admin-only: returns the event type names known to the storage
        backend.
        """
        return list(pecan.request.storage_conn.get_event_types())
1841
2244
                     traits=event.traits)
1842
2245
 
1843
2246
 
 
2247
class QuerySamplesController(rest.RestController):
    """Provides complex query possibilities for samples
    """

    @wsme_pecan.wsexpose([Sample], body=ComplexQuery)
    def post(self, body):
        """Define query for retrieving Sample data.

        :param body: Query rules for the samples to be returned.
        """
        # Samples may additionally be filtered on metadata fields
        # (metadata_allowed=True), unlike alarms and alarm history.
        query = ValidatedComplexQuery(body,
                                      storage.models.Sample,
                                      ["user", "project", "resource"],
                                      metadata_allowed=True)
        # NOTE(review): validate() presumably also scopes non-admin
        # callers via the visibility field — confirm in
        # ValidatedComplexQuery.validate.
        query.validate(visibility_field="project_id")
        conn = pecan.request.storage_conn
        return [Sample.from_db_model(s)
                for s in conn.query_samples(query.filter_expr,
                                            query.orderby,
                                            query.limit)]
 
2267
 
 
2268
 
 
2269
class QueryAlarmHistoryController(rest.RestController):
    """Provides complex query possibilities for alarm history
    """
    @wsme_pecan.wsexpose([AlarmChange], body=ComplexQuery)
    def post(self, body):
        """Define query for retrieving AlarmChange data.

        :param body: Query rules for the alarm history to be returned.
        """
        query = ValidatedComplexQuery(body,
                                      storage.models.AlarmChange,
                                      ["user", "project"])
        # Visibility is enforced on "on_behalf_of" here, not project_id
        # as for samples/alarms.
        query.validate(visibility_field="on_behalf_of")
        conn = pecan.request.storage_conn
        return [AlarmChange.from_db_model(s)
                for s in conn.query_alarm_history(query.filter_expr,
                                                  query.orderby,
                                                  query.limit)]
 
2287
 
 
2288
 
 
2289
class QueryAlarmsController(rest.RestController):
    """Provides complex query possibilities for alarms
    """
    # Nested controller: complex queries over alarm change history.
    history = QueryAlarmHistoryController()

    @wsme_pecan.wsexpose([Alarm], body=ComplexQuery)
    def post(self, body):
        """Define query for retrieving Alarm data.

        :param body: Query rules for the alarms to be returned.
        """
        query = ValidatedComplexQuery(body,
                                      storage.models.Alarm,
                                      ["user", "project"])
        # Visibility is enforced on the alarm's project_id.
        query.validate(visibility_field="project_id")
        conn = pecan.request.storage_conn
        return [Alarm.from_db_model(s)
                for s in conn.query_alarms(query.filter_expr,
                                           query.orderby,
                                           query.limit)]
 
2309
 
 
2310
 
 
2311
class QueryController(rest.RestController):
    """Root of the complex-query API: rich JSON query bodies are POSTed
    to the samples and alarms (and alarms/history) sub-controllers.
    """

    samples = QuerySamplesController()
    alarms = QueryAlarmsController()
 
2315
 
 
2316
 
 
2317
def _flatten_capabilities(capabilities):
    """Collapse the nested capability dict into flat key/value pairs.

    utils.recursive_keypairs already yields (key, value) tuples, so they
    feed the dict constructor directly.
    """
    return dict(utils.recursive_keypairs(capabilities))
 
2319
 
 
2320
 
 
2321
class Capabilities(_Base):
    """A representation of the API capabilities, usually constrained
       by restrictions imposed by the storage driver.
    """

    api = {wtypes.text: bool}
    "A flattened dictionary of API capabilities"

    @classmethod
    def sample(cls):
        # Illustrative payload only; live values come from the storage
        # driver (see CapabilitiesController.get).
        return cls(
            api=_flatten_capabilities({
                'meters': {'pagination': True,
                           'query': {'simple': True,
                                     'metadata': True,
                                     'complex': False}},
                'resources': {'pagination': False,
                              'query': {'simple': True,
                                        'metadata': True,
                                        'complex': False}},
                'samples': {'pagination': True,
                            'groupby': True,
                            'query': {'simple': True,
                                      'metadata': True,
                                      'complex': True}},
                'statistics': {'pagination': True,
                               'groupby': True,
                               'query': {'simple': True,
                                         'metadata': True,
                                         'complex': False},
                               'aggregation': {'standard': True,
                                               'selectable': {
                                                   'max': True,
                                                   'min': True,
                                                   'sum': True,
                                                   'avg': True,
                                                   'count': True,
                                                   'stddev': True,
                                                   'cardinality': True,
                                                   'quartile': False}}},
                'alarms': {'query': {'simple': True,
                                     'complex': True},
                           'history': {'query': {'simple': True,
                                                 'complex': True}}},
                'events': {'query': {'simple': True}},
            })
        )
 
2368
 
 
2369
 
 
2370
class CapabilitiesController(rest.RestController):
    """Manages capabilities queries.
    """

    @wsme_pecan.wsexpose(Capabilities)
    def get(self):
        """Return the capabilities reported by the active storage driver."""
        # variation in API capabilities is effectively determined by
        # the lack of strict feature parity across storage drivers
        driver_capabilities = pecan.request.storage_conn.get_capabilities()
        return Capabilities(api=_flatten_capabilities(driver_capabilities))
 
2380
 
 
2381
 
1844
2382
class V2Controller(object):
1845
2383
    """Version 2 API controller root."""
1846
2384
 
1850
2388
    alarms = AlarmsController()
1851
2389
    event_types = EventTypesController()
1852
2390
    events = EventsController()
 
2391
    query = QueryController()
 
2392
    capabilities = CapabilitiesController()