2
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
4
# This module is part of SQLAlchemy and is released under
5
# the MIT License: http://www.opensource.org/licenses/mit-license.php
7
"""Define result set constructs including :class:`.ResultProxy`
8
and :class:`.RowProxy."""
11
from itertools import izip
12
from .. import exc, types, util
13
from ..sql import expression
16
# This reconstructor is necessary so that pickles with the C extension or
17
# without use the same Binary format.
19
# We need a different reconstructor on the C extension so that we can
20
# add extra checks that fields have correctly been initialized by
22
from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
24
# The extra function embedding is needed so that the
25
# reconstructor function has the same signature whether or not
26
# the extension is present.
27
def rowproxy_reconstructor(cls, state):
    """Rebuild a pickled RowProxy via the C extension's checked
    reconstructor, which validates that all fields were initialized."""
    return safe_rowproxy_reconstructor(cls, state)
30
def rowproxy_reconstructor(cls, state):
    """Reconstruct a RowProxy instance during unpickling.

    Creates the instance without running ``__init__`` and restores its
    state via ``__setstate__``.  The rebuilt object must be returned;
    a reconstructor that falls off the end would make every unpickled
    row come back as ``None``.
    """
    obj = cls.__new__(cls)
    obj.__setstate__(state)
    return obj
36
from sqlalchemy.cresultproxy import BaseRowProxy
38
class BaseRowProxy(object):
39
__slots__ = ('_parent', '_row', '_processors', '_keymap')
41
def __init__(self, parent, row, processors, keymap):
    """RowProxy objects are constructed by ResultProxy objects."""
    # populate every slot declared in __slots__; the accessor and
    # pickling methods read all four of these attributes, so leaving
    # any unassigned would raise AttributeError on first access.
    self._parent = parent
    self._row = row
    self._processors = processors
    self._keymap = keymap
50
def __reduce__(self):
    """Pickle support: delegate reconstruction to
    ``rowproxy_reconstructor`` with this row's state."""
    return (rowproxy_reconstructor,
            (self.__class__, self.__getstate__()))
54
"""Return the values represented by this RowProxy as a list."""
58
for processor, value in izip(self._processors, self._row):
62
yield processor(value)
67
def __getitem__(self, key):
69
processor, obj, index = self._keymap[key]
71
processor, obj, index = self._parent._key_fallback(key)
73
if isinstance(key, slice):
75
for processor, value in izip(self._processors[key],
80
l.append(processor(value))
85
raise exc.InvalidRequestError(
86
"Ambiguous column name '%s' in result set! "
87
"try 'use_labels' option on select statement." % key)
88
if processor is not None:
89
return processor(self._row[index])
91
return self._row[index]
93
def __getattr__(self, name):
97
raise AttributeError(e.args[0])
100
class RowProxy(BaseRowProxy):
101
"""Proxy values from a single cursor row.
103
Mostly follows "ordered dictionary" behavior, mapping result
104
values to the string-based column name, the integer position of
105
the result in the row, as well as Column instances which can be
106
mapped to the original Columns that produced this result set (for
107
results that correspond to constructed SQL expressions).
111
def __contains__(self, key):
    """True if ``key`` identifies a column present in this row."""
    parent = self._parent
    return parent._has_key(self._row, key)
114
def __getstate__(self):
116
'_parent': self._parent,
120
def __setstate__(self, state):
    """Restore pickled state.

    The processors and keymap are not stored in the pickle; they are
    re-acquired from the (also pickled) parent metadata.
    """
    parent = state['_parent']
    self._parent = parent
    self._row = state['_row']
    self._processors = parent._processors
    self._keymap = parent._keymap
128
def __eq__(self, other):
    """A row is equal to itself, or to anything that compares equal
    to its tuple form."""
    if other is self:
        return True
    # keep "other" on the left so its __eq__ gets first dispatch
    return other == tuple(self)
131
def __ne__(self, other):
    """Negation of :meth:`__eq__`."""
    equal = self.__eq__(other)
    return not equal
135
return repr(tuple(self))
137
def has_key(self, key):
    """Return True if this RowProxy contains the given key."""
    # same lookup as the ``in`` operator (__contains__)
    return key in self
143
"""Return a list of tuples, each tuple containing a key/value pair."""
144
# TODO: no coverage here
145
return [(key, self[key]) for key in self.iterkeys()]
148
"""Return the list of keys as strings represented by this RowProxy."""
150
return self._parent.keys
153
return iter(self._parent.keys)
155
def itervalues(self):
159
# Register RowProxy as a virtual subclass of collections.Sequence so
# that isinstance(row, Sequence) checks succeed and the sequence
# protocol is advertised.
import collections
collections.Sequence.register(RowProxy)
167
class ResultMetaData(object):
168
"""Handle cursor.description, applying additional info from an execution
171
def __init__(self, parent, metadata):
172
self._processors = processors = []
174
# We do not strictly need to store the processor in the key mapping,
175
# though it is faster in the Python version (probably because of the
176
# saved attribute lookup self._processors)
177
self._keymap = keymap = {}
179
context = parent.context
180
dialect = context.dialect
181
typemap = dialect.dbapi_type_map
182
translate_colname = context._translate_colname
183
self.case_sensitive = dialect.case_sensitive
185
# high precedence key values.
188
for i, rec in enumerate(metadata):
192
if dialect.description_encoding:
193
colname = dialect._description_decoder(colname)
195
if translate_colname:
196
colname, untranslated = translate_colname(colname)
198
if dialect.requires_name_normalize:
199
colname = dialect.normalize_name(colname)
201
if context.result_map:
203
name, obj, type_ = context.result_map[colname
204
if self.case_sensitive
205
else colname.lower()]
208
colname, None, typemap.get(coltype, types.NULLTYPE)
211
colname, None, typemap.get(coltype, types.NULLTYPE)
213
processor = context.get_result_processor(type_, colname, coltype)
215
processors.append(processor)
216
rec = (processor, obj, i)
218
# indexes as keys. This is only needed for the Python version of
219
# RowProxy (the C version uses a faster path for integer indexes).
220
primary_keymap[i] = rec
222
# populate primary keymap, looking for conflicts.
223
if primary_keymap.setdefault(
224
name if self.case_sensitive
227
# place a record that doesn't have the "index" - this
228
# is interpreted later as an AmbiguousColumnError,
229
# but only when actually accessed. Columns
230
# colliding by name is not a problem if those names
231
# aren't used; integer access is always
234
if self.case_sensitive
235
else name.lower()] = rec = (None, obj, None)
237
self.keys.append(colname)
241
# technically we should be doing this but we
242
# are saving on callcounts by not doing so.
243
# if keymap.setdefault(o, rec) is not rec:
244
# keymap[o] = (None, obj, None)
246
if translate_colname and \
248
keymap[untranslated] = rec
250
# overwrite keymap values with those of the
251
# high precedence keymap.
252
keymap.update(primary_keymap)
255
context.engine.logger.debug(
256
"Col %r", tuple(x[0] for x in metadata))
258
@util.pending_deprecation("0.8", "sqlite dialect uses "
259
"_translate_colname() now")
260
def _set_keymap_synonym(self, name, origname):
261
"""Set a synonym for the given name.
263
Some dialects (SQLite at the moment) may use this to
264
adjust the column names that are significant within a
268
rec = (processor, obj, i) = self._keymap[origname if
270
else origname.lower()]
271
if self._keymap.setdefault(name, rec) is not rec:
272
self._keymap[name] = (processor, obj, None)
274
def _key_fallback(self, key, raiseerr=True):
277
if isinstance(key, basestring):
278
result = map.get(key if self.case_sensitive else key.lower())
279
# fallback for targeting a ColumnElement to a textual expression
280
# this is a rare use case which only occurs when matching text()
281
# or colummn('name') constructs to ColumnElements, or after a
282
# pickle/unpickle roundtrip
283
elif isinstance(key, expression.ColumnElement):
286
if self.case_sensitive
287
else key._label.lower()) in map:
288
result = map[key._label
289
if self.case_sensitive
290
else key._label.lower()]
291
elif hasattr(key, 'name') and (
293
if self.case_sensitive
294
else key.name.lower()) in map:
295
# match is only on name.
296
result = map[key.name
297
if self.case_sensitive
298
else key.name.lower()]
299
# search extra hard to make sure this
300
# isn't a column/label name overlap.
301
# this check isn't currently available if the row
303
if result is not None and \
304
result[1] is not None:
305
for obj in result[1]:
306
if key._compare_name_for_result(obj):
312
raise exc.NoSuchColumnError(
313
"Could not locate column in row for column '%s'" %
314
expression._string_or_unprintable(key))
321
def _has_key(self, row, key):
    """Return True if ``key`` can address a column of this result.

    The keymap is consulted first; on a miss, the non-raising key
    fallback is tried (covers ColumnElement / textual matches).
    """
    if key in self._keymap:
        # fast path: key is directly present in the keymap
        return True
    return self._key_fallback(key, False) is not None
327
def __getstate__(self):
329
'_pickled_keymap': dict(
331
for key, (processor, obj, index) in self._keymap.iteritems()
332
if isinstance(key, (basestring, int))
335
"case_sensitive": self.case_sensitive,
338
def __setstate__(self, state):
339
# the row has been processed at pickling time so we don't need any
341
self._processors = [None for _ in xrange(len(state['keys']))]
342
self._keymap = keymap = {}
343
for key, index in state['_pickled_keymap'].iteritems():
344
# not preserving "obj" here, unfortunately our
345
# proxy comparison fails with the unpickle
346
keymap[key] = (None, None, index)
347
self.keys = state['keys']
348
self.case_sensitive = state['case_sensitive']
352
class ResultProxy(object):
353
"""Wraps a DB-API cursor object to provide easier access to row columns.
355
Individual columns may be accessed by their integer position,
356
case-insensitive column name, or by ``schema.Column``
361
col1 = row[0] # access via integer position
363
col2 = row['col2'] # access via name
365
col3 = row[mytable.c.mycol] # access via Column object.
367
``ResultProxy`` also handles post-processing of result column
368
data using ``TypeEngine`` objects, which are referenced from
369
the originating SQL statement that produced this result set.
373
_process_row = RowProxy
374
out_parameters = None
375
_can_close_connection = False
378
def __init__(self, context):
379
self.context = context
380
self.dialect = context.dialect
382
self.cursor = self._saved_cursor = context.cursor
383
self.connection = context.root_connection
384
self._echo = self.connection._echo and \
385
context.engine._should_log_debug()
386
self._init_metadata()
388
def _init_metadata(self):
389
metadata = self._cursor_description()
390
if metadata is not None:
391
self._metadata = ResultMetaData(self, metadata)
394
"""Return the current set of string keys for rows."""
396
return self._metadata.keys
400
@util.memoized_property
402
"""Return the 'rowcount' for this result.
404
The 'rowcount' reports the number of rows *matched*
405
by the WHERE criterion of an UPDATE or DELETE statement.
409
Notes regarding :attr:`.ResultProxy.rowcount`:
412
* This attribute returns the number of rows *matched*,
413
which is not necessarily the same as the number of rows
414
that were actually *modified* - an UPDATE statement, for example,
415
may have no net change on a given row if the SET values
416
given are the same as those present in the row already.
417
Such a row would be matched but not modified.
418
On backends that feature both styles, such as MySQL,
419
rowcount is configured by default to return the match
422
* :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
423
with an UPDATE or DELETE statement. Contrary to what the Python
424
DBAPI says, it does *not* return the
425
number of rows available from the results of a SELECT statement
426
as DBAPIs cannot support this functionality when rows are
429
* :attr:`.ResultProxy.rowcount` may not be fully implemented by
430
all dialects. In particular, most DBAPIs do not support an
431
aggregate rowcount result from an executemany call.
432
The :meth:`.ResultProxy.supports_sane_rowcount` and
433
:meth:`.ResultProxy.supports_sane_multi_rowcount` methods
434
will report from the dialect if each usage is known to be
437
* Statements that use RETURNING may not return a correct
442
return self.context.rowcount
444
self.connection._handle_dbapi_exception(
445
e, None, None, self.cursor, self.context)
449
"""return the 'lastrowid' accessor on the DBAPI cursor.
451
This is a DBAPI specific method and is only functional
452
for those backends which support it, for statements
453
where it is appropriate. It's behavior is not
454
consistent across backends.
456
Usage of this method is normally unnecessary when
457
using insert() expression constructs; the
458
:attr:`~ResultProxy.inserted_primary_key` attribute provides a
459
tuple of primary key values for a newly inserted row,
460
regardless of database backend.
464
return self._saved_cursor.lastrowid
466
self.connection._handle_dbapi_exception(
468
self._saved_cursor, self.context)
471
def returns_rows(self):
    """True if this :class:`.ResultProxy` returns rows.

    I.e. if it is legal to call the methods
    :meth:`~.ResultProxy.fetchone`,
    :meth:`~.ResultProxy.fetchmany`
    :meth:`~.ResultProxy.fetchall`.

    """
    metadata = self._metadata
    return metadata is not None
484
"""True if this :class:`.ResultProxy` is the result
485
of a executing an expression language compiled
486
:func:`.expression.insert` construct.
488
When True, this implies that the
489
:attr:`inserted_primary_key` attribute is accessible,
490
assuming the statement did not include
491
a user defined "returning" construct.
494
return self.context.isinsert
496
def _cursor_description(self):
    """May be overridden by subclasses."""
    cursor = self._saved_cursor
    return cursor.description
501
def close(self, _autoclose_connection=True):
502
"""Close this ResultProxy.
504
Closes the underlying DBAPI cursor corresponding to the execution.
506
Note that any data cached within this ResultProxy is still available.
507
For some types of results, this may include buffered rows.
509
If this ResultProxy was generated from an implicit execution,
510
the underlying Connection will also be closed (returns the
511
underlying DBAPI connection to the connection pool.)
513
This method is called automatically when:
515
* all result rows are exhausted using the fetchXXX() methods.
516
* cursor.description is None.
522
self.connection._safe_close_cursor(self.cursor)
523
if _autoclose_connection and \
524
self.connection.should_close_with_result:
525
self.connection.close()
526
# allow consistent errors
531
row = self.fetchone()
537
@util.memoized_property
538
def inserted_primary_key(self):
539
"""Return the primary key for the row just inserted.
541
The return value is a list of scalar values
542
corresponding to the list of primary key columns
545
This only applies to single row :func:`.insert`
546
constructs which did not explicitly specify
547
:meth:`.Insert.returning`.
549
Note that primary key columns which specify a
550
server_default clause,
551
or otherwise do not qualify as "autoincrement"
552
columns (see the notes at :class:`.Column`), and were
553
generated using the database-side default, will
554
appear in this list as ``None`` unless the backend
555
supports "returning" and the insert statement executed
556
with the "implicit returning" enabled.
558
Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
559
statement is not a compiled expression construct
560
or is not an insert() construct.
564
if not self.context.compiled:
565
raise exc.InvalidRequestError(
566
"Statement is not a compiled "
567
"expression construct.")
568
elif not self.context.isinsert:
569
raise exc.InvalidRequestError(
570
"Statement is not an insert() "
571
"expression construct.")
572
elif self.context._is_explicit_returning:
573
raise exc.InvalidRequestError(
574
"Can't call inserted_primary_key "
578
return self.context.inserted_primary_key
580
def last_updated_params(self):
    """Return the collection of updated parameters from this
    execution.

    Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
    statement is not a compiled expression construct
    or is not an update() construct.

    """
    context = self.context
    if not context.compiled:
        raise exc.InvalidRequestError(
            "Statement is not a compiled "
            "expression construct.")
    if not context.isupdate:
        raise exc.InvalidRequestError(
            "Statement is not an update() "
            "expression construct.")
    if context.executemany:
        # executemany: one parameter set per row
        return context.compiled_parameters
    return context.compiled_parameters[0]
602
def last_inserted_params(self):
    """Return the collection of inserted parameters from this
    execution.

    Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
    statement is not a compiled expression construct
    or is not an insert() construct.

    """
    context = self.context
    if not context.compiled:
        raise exc.InvalidRequestError(
            "Statement is not a compiled "
            "expression construct.")
    if not context.isinsert:
        raise exc.InvalidRequestError(
            "Statement is not an insert() "
            "expression construct.")
    if context.executemany:
        # executemany: one parameter set per row
        return context.compiled_parameters
    return context.compiled_parameters[0]
624
def lastrow_has_defaults(self):
    """Return ``lastrow_has_defaults()`` from the underlying
    :class:`.ExecutionContext`.

    See :class:`.ExecutionContext` for details.

    """
    context = self.context
    return context.lastrow_has_defaults()
634
def postfetch_cols(self):
    """Return ``postfetch_cols()`` from the underlying
    :class:`.ExecutionContext`.

    See :class:`.ExecutionContext` for details.

    Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
    statement is not a compiled expression construct
    or is not an insert() or update() construct.

    """
    context = self.context
    if not context.compiled:
        raise exc.InvalidRequestError(
            "Statement is not a compiled "
            "expression construct.")
    if not (context.isinsert or context.isupdate):
        raise exc.InvalidRequestError(
            "Statement is not an insert() or update() "
            "expression construct.")
    return context.postfetch_cols
656
def prefetch_cols(self):
    """Return ``prefetch_cols()`` from the underlying
    :class:`.ExecutionContext`.

    See :class:`.ExecutionContext` for details.

    Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
    statement is not a compiled expression construct
    or is not an insert() or update() construct.

    """
    context = self.context
    if not context.compiled:
        raise exc.InvalidRequestError(
            "Statement is not a compiled "
            "expression construct.")
    if not (context.isinsert or context.isupdate):
        raise exc.InvalidRequestError(
            "Statement is not an insert() or update() "
            "expression construct.")
    return context.prefetch_cols
678
def supports_sane_rowcount(self):
    """Return ``supports_sane_rowcount`` from the dialect.

    See :attr:`.ResultProxy.rowcount` for background.

    """
    dialect = self.dialect
    return dialect.supports_sane_rowcount
687
def supports_sane_multi_rowcount(self):
    """Return ``supports_sane_multi_rowcount`` from the dialect.

    See :attr:`.ResultProxy.rowcount` for background.

    """
    dialect = self.dialect
    return dialect.supports_sane_multi_rowcount
696
def _fetchone_impl(self):
698
return self.cursor.fetchone()
699
except AttributeError:
702
def _fetchmany_impl(self, size=None):
705
return self.cursor.fetchmany()
707
return self.cursor.fetchmany(size)
708
except AttributeError:
711
def _fetchall_impl(self):
713
return self.cursor.fetchall()
714
except AttributeError:
717
def _non_result(self):
    """Raise a ResourceClosedError describing why rows are unavailable:
    either the statement never returned rows, or the result is closed."""
    if self._metadata is None:
        raise exc.ResourceClosedError(
            "This result object does not return rows. "
            "It has been closed automatically.",
        )
    raise exc.ResourceClosedError("This result object is closed.")
726
def process_rows(self, rows):
727
process_row = self._process_row
728
metadata = self._metadata
729
keymap = metadata._keymap
730
processors = metadata._processors
732
log = self.context.engine.logger.debug
736
l.append(process_row(metadata, row, processors, keymap))
739
return [process_row(metadata, row, processors, keymap)
743
"""Fetch all rows, just like DB-API ``cursor.fetchall()``."""
746
l = self.process_rows(self._fetchall_impl())
750
self.connection._handle_dbapi_exception(
752
self.cursor, self.context)
754
def fetchmany(self, size=None):
755
"""Fetch many rows, just like DB-API
756
``cursor.fetchmany(size=cursor.arraysize)``.
758
If rows are present, the cursor remains open after this is called.
759
Else the cursor is automatically closed and an empty list is returned.
764
l = self.process_rows(self._fetchmany_impl(size))
769
self.connection._handle_dbapi_exception(
771
self.cursor, self.context)
774
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
776
If a row is present, the cursor remains open after this is called.
777
Else the cursor is automatically closed and None is returned.
781
row = self._fetchone_impl()
783
return self.process_rows([row])[0]
788
self.connection._handle_dbapi_exception(
790
self.cursor, self.context)
793
"""Fetch the first row and then close the result set unconditionally.
795
Returns None if no row is present.
798
if self._metadata is None:
802
row = self._fetchone_impl()
804
self.connection._handle_dbapi_exception(
806
self.cursor, self.context)
810
return self.process_rows([row])[0]
817
"""Fetch the first column of the first row, and close the result set.
819
Returns None if no row is present.
829
class BufferedRowResultProxy(ResultProxy):
830
"""A ResultProxy with row buffering behavior.
832
``ResultProxy`` that buffers the contents of a selection of rows
833
before ``fetchone()`` is called. This is to allow the results of
834
``cursor.description`` to be available immediately, when
835
interfacing with a DB-API that requires rows to be consumed before
836
this information is available (currently psycopg2, when used with
837
server-side cursors).
839
The pre-fetching behavior fetches only one row initially, and then
840
grows its buffer size by a fixed amount with each successive need
841
for additional rows up to a size of 100.
844
def _init_metadata(self):
846
super(BufferedRowResultProxy, self)._init_metadata()
848
# this is a "growth chart" for the buffering of rows.
849
# each successive __buffer_rows call will use the next
850
# value in the list for the buffer size until the max
863
def __buffer_rows(self):
    # refill the row buffer from the cursor.  The batch size starts
    # at 1 (no _bufsize attribute yet) and is advanced through the
    # size_growth mapping on each refill; sizes not in the mapping
    # are left unchanged.
    size = getattr(self, '_bufsize', 1)
    fetched = self.cursor.fetchmany(size)
    self.__rowbuffer = collections.deque(fetched)
    self._bufsize = self.size_growth.get(size, size)
868
def _fetchone_impl(self):
871
if not self.__rowbuffer:
873
if not self.__rowbuffer:
875
return self.__rowbuffer.popleft()
877
def _fetchmany_impl(self, size=None):
879
return self._fetchall_impl()
881
for x in range(0, size):
882
row = self._fetchone_impl()
888
def _fetchall_impl(self):
    """Return all buffered rows plus whatever remains on the cursor,
    resetting the buffer to empty."""
    self.__rowbuffer.extend(self.cursor.fetchall())
    ret = self.__rowbuffer
    self.__rowbuffer = collections.deque()
    # hand the accumulated rows back to the caller; without this
    # return, the rows gathered above would be silently discarded
    return ret
895
class FullyBufferedResultProxy(ResultProxy):
896
"""A result proxy that buffers rows fully upon creation.
898
Used for operations where a result is to be delivered
899
after the database conversation can not be continued,
900
such as MSSQL INSERT...OUTPUT after an autocommit.
903
def _init_metadata(self):
    """Set up result metadata, then eagerly pull every remaining row
    into the in-memory buffer."""
    super(FullyBufferedResultProxy, self)._init_metadata()
    self.__rowbuffer = self._buffer_rows()
907
def _buffer_rows(self):
    """Fetch all remaining cursor rows into a deque."""
    rows = self.cursor.fetchall()
    return collections.deque(rows)
910
def _fetchone_impl(self):
    """Pop the next buffered row, or None when the buffer is exhausted.

    Returning None (rather than letting ``popleft()`` raise IndexError
    on an empty deque) matches DB-API ``fetchone()`` semantics relied
    upon by the ResultProxy fetch methods.
    """
    if self.__rowbuffer:
        return self.__rowbuffer.popleft()
    return None
916
def _fetchmany_impl(self, size=None):
918
return self._fetchall_impl()
920
for x in range(0, size):
921
row = self._fetchone_impl()
927
def _fetchall_impl(self):
    """Return the entire buffer, resetting it to empty."""
    ret = self.__rowbuffer
    self.__rowbuffer = collections.deque()
    # hand the buffered rows back to the caller; without this return,
    # the rows would be lost when the buffer is replaced above
    return ret
933
class BufferedColumnRow(RowProxy):
    """RowProxy which applies the parent's original result processors
    eagerly at construction time, used by BufferedColumnResultProxy so
    that "live" column values (e.g. LOBs) are fetched immediately."""

    def __init__(self, parent, row, processors, keymap):
        # copy the DBAPI row into a mutable list first -- DBAPI rows
        # are typically immutable tuples and cannot be updated in place
        row = list(row)
        for index, processor in enumerate(parent._orig_processors):
            if processor is not None:
                row[index] = processor(row[index])
        # freeze back to a tuple before normal RowProxy construction
        row = tuple(row)
        super(BufferedColumnRow, self).__init__(parent, row,
                                                processors, keymap)
948
class BufferedColumnResultProxy(ResultProxy):
949
"""A ResultProxy with column buffering behavior.
951
``ResultProxy`` that loads all columns into memory each time
952
fetchone() is called. If fetchmany() or fetchall() are called,
953
the full grid of results is fetched. This is to operate with
954
databases where result rows contain "live" results that fall out
955
of scope unless explicitly fetched. Currently this includes
956
cx_Oracle LOB objects.
960
_process_row = BufferedColumnRow
962
def _init_metadata(self):
    """Install pre-processing metadata for column buffering.

    The original result processors are saved on the metadata as
    ``_orig_processors`` (applied eagerly per-row by BufferedColumnRow)
    and the active processors/keymap entries are replaced with no-op
    (None) processors so values are not processed a second time.
    """
    super(BufferedColumnResultProxy, self)._init_metadata()
    metadata = self._metadata
    # orig_processors will be used to preprocess each row when they are
    # constructed.
    metadata._orig_processors = metadata._processors
    # replace the all type processors by None processors.
    metadata._processors = [None for _ in xrange(len(metadata.keys))]
    # rebuild the keymap with the processor slot cleared; the dict must
    # be initialized before the loop populates it
    keymap = {}
    for k, (func, obj, index) in metadata._keymap.iteritems():
        keymap[k] = (None, obj, index)
    self._metadata._keymap = keymap
976
# can't call cursor.fetchall(), since rows must be
977
# fully processed before requesting more from the DBAPI.
980
row = self.fetchone()
986
def fetchmany(self, size=None):
987
# can't call cursor.fetchmany(), since rows must be
988
# fully processed before requesting more from the DBAPI.
990
return self.fetchall()
992
for i in xrange(size):
993
row = self.fetchone()