# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
8
"""Basic components for SQL execution and interfacing with DB-API.
8
"""Defines :class:`.Connection` and :class:`.Engine`.
10
Defines the basic components used to interface DB-API modules with
11
higher-level statement-construction, connection-management, execution
16
'BufferedColumnResultProxy', 'BufferedColumnRow',
17
'BufferedRowResultProxy','Compiled', 'Connectable', 'Connection',
18
'Dialect', 'Engine','ExecutionContext', 'NestedTransaction',
19
'ResultProxy', 'RootTransaction','RowProxy', 'SchemaIterator',
20
'StringIO', 'Transaction', 'TwoPhaseTransaction',
23
import inspect, StringIO, sys, operator
24
from itertools import izip
25
from sqlalchemy import exc, schema, util, types, log, interfaces, \
27
from sqlalchemy.sql import expression, util as sql_util
28
from sqlalchemy import processors
31
class Dialect(object):
32
"""Define the behavior of a specific database and DB-API combination.
34
Any aspect of metadata definition, SQL query generation,
35
execution, result-set handling, or anything else which varies
36
between databases is defined under the general category of the
37
Dialect. The Dialect acts as a factory for other
38
database-specific object implementations including
39
ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
41
All Dialects implement the following attributes:
44
identifying name for the dialect from a DBAPI-neutral point of view
48
identifying name for the dialect's DBAPI
51
True if the paramstyle for this Dialect is positional.
54
the paramstyle to be used (some DB-APIs support multiple
58
True if Unicode conversion should be applied to all ``str``
62
type of encoding to use for unicode, usually defaults to
66
a :class:`~Compiled` class used to compile SQL statements
69
a :class:`~Compiled` class used to compile DDL statements
72
a tuple containing a version number for the DB backend in use.
73
This value is only available for supporting dialects, and is
74
typically populated during the initial connection to the database.
77
the name of the default schema. This value is only available for
78
supporting dialects, and is typically populated during the
79
initial connection to the database.
82
a :class:`.ExecutionContext` class used to handle statement execution
84
execute_sequence_format
85
either the 'tuple' or 'list' type, depending on what cursor.execute()
86
accepts for the second argument (they vary).
89
a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
93
``True`` if the database supports ``ALTER TABLE``.
96
The maximum length of identifier names.
98
supports_unicode_statements
99
Indicate whether the DB-API can receive SQL statements as Python
102
supports_unicode_binds
103
Indicate whether the DB-API can receive string bind parameters
104
as Python unicode strings
106
supports_sane_rowcount
107
Indicate whether the dialect properly implements rowcount for
108
``UPDATE`` and ``DELETE`` statements.
110
supports_sane_multi_rowcount
111
Indicate whether the dialect properly implements rowcount for
112
``UPDATE`` and ``DELETE`` statements when executed via
115
preexecute_autoincrement_sequences
116
True if 'implicit' primary key functions must be executed separately
117
in order to get their value. This is currently oriented towards
121
use RETURNING or equivalent during INSERT execution in order to load
122
newly generated primary keys and other column defaults in one execution,
123
which are then available via inserted_primary_key.
124
If an insert statement has returning() specified explicitly,
125
the "implicit" functionality is not used and inserted_primary_key
126
will not be available.
129
A mapping of DB-API type objects present in this Dialect's
130
DB-API implementation mapped to TypeEngine implementations used
133
This is used to apply types to result sets based on the DB-API
134
types present in cursor.description; it only takes effect for
135
result sets against textual statements where no explicit
139
A dictionary of TypeEngine classes from sqlalchemy.types mapped
140
to subclasses that are specific to the dialect class. This
141
dictionary is class-level only and is not accessed from the
142
dialect instance itself.
144
supports_default_values
145
Indicates if the construct ``INSERT INTO tablename DEFAULT
146
VALUES`` is supported
149
Indicates if the dialect supports CREATE SEQUENCE or similar.
152
If True, indicates if the "optional" flag on the Sequence() construct
153
should signal to not generate a CREATE SEQUENCE. Applies only to
154
dialects that support sequences. Currently used only to allow Postgresql
155
SERIAL to be used on a column that specifies Sequence() for usage on
159
Indicates if the dialect supports a native ENUM construct.
160
This will prevent types.Enum from generating a CHECK
161
constraint when that type is used.
163
supports_native_boolean
164
Indicates if the dialect supports a native boolean construct.
165
This will prevent types.Boolean from generating a CHECK
166
constraint when that type is used.
170
def create_connect_args(self, url):
171
"""Build DB-API compatible connection arguments.
173
Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
174
consisting of a `*args`/`**kwargs` suitable to send directly
175
to the dbapi's connect function.
179
raise NotImplementedError()
182
def type_descriptor(cls, typeobj):
183
"""Transform a generic type to a dialect-specific type.
185
Dialect classes will usually use the
186
:func:`~sqlalchemy.types.adapt_type` function in the types module to
189
The returned result is cached *per dialect class* so can
190
contain no dialect-instance state.
194
raise NotImplementedError()
196
def initialize(self, connection):
197
"""Called during strategized creation of the dialect with a
200
Allows dialects to configure options based on server version info or
203
The connection passed here is a SQLAlchemy Connection object,
204
with full capabilities.
206
The initalize() method of the base dialect should be called via
213
def reflecttable(self, connection, table, include_columns=None):
214
"""Load table description from the database.
216
Given a :class:`.Connection` and a
217
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
218
properties from the database. If include_columns (a list or
219
set) is specified, limit the autoload to the given column
222
The default implementation uses the
223
:class:`~sqlalchemy.engine.reflection.Inspector` interface to
224
provide the output, building upon the granular table/column/
225
constraint etc. methods of :class:`.Dialect`.
229
raise NotImplementedError()
231
def get_columns(self, connection, table_name, schema=None, **kw):
232
"""Return information about columns in `table_name`.
234
Given a :class:`.Connection`, a string
235
`table_name`, and an optional string `schema`, return column
236
information as a list of dictionaries with these keys:
242
[sqlalchemy.types#TypeEngine]
248
the column's default value
254
a dictionary of the form
255
{'name' : str, 'start' :int, 'increment': int}
257
Additional column attributes may be present.
260
raise NotImplementedError()
262
def get_primary_keys(self, connection, table_name, schema=None, **kw):
263
"""Return information about primary keys in `table_name`.
265
Given a :class:`.Connection`, a string
266
`table_name`, and an optional string `schema`, return primary
267
key information as a list of column names.
270
raise NotImplementedError()
272
def get_pk_constraint(self, table_name, schema=None, **kw):
273
"""Return information about the primary key constraint on
276
Given a string `table_name`, and an optional string `schema`, return
277
primary key information as a dictionary with these keys:
280
a list of column names that make up the primary key
283
optional name of the primary key constraint.
286
raise NotImplementedError()
288
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
289
"""Return information about foreign_keys in `table_name`.
291
Given a :class:`.Connection`, a string
292
`table_name`, and an optional string `schema`, return foreign
293
key information as a list of dicts with these keys:
296
the constraint's name
299
a list of column names that make up the foreign key
302
the name of the referred schema
305
the name of the referred table
308
a list of column names in the referred table that correspond to
312
raise NotImplementedError()
314
def get_table_names(self, connection, schema=None, **kw):
    """Return a list of table names for `schema`."""

    # Raise an instance for consistency with the sibling interface
    # methods (the original raised the bare exception class).
    raise NotImplementedError()
319
def get_view_names(self, connection, schema=None, **kw):
320
"""Return a list of all view names available in the database.
323
Optional, retrieve names from a non-default schema.
326
raise NotImplementedError()
328
def get_view_definition(self, connection, view_name, schema=None, **kw):
329
"""Return view definition.
331
Given a :class:`.Connection`, a string
332
`view_name`, and an optional string `schema`, return the view
336
raise NotImplementedError()
338
def get_indexes(self, connection, table_name, schema=None, **kw):
339
"""Return information about indexes in `table_name`.
341
Given a :class:`.Connection`, a string
342
`table_name` and an optional string `schema`, return index
343
information as a list of dictionaries with these keys:
349
list of column names in order
355
raise NotImplementedError()
357
def normalize_name(self, name):
358
"""convert the given name to lowercase if it is detected as
361
this method is only used if the dialect defines
362
requires_name_normalize=True.
365
raise NotImplementedError()
367
def denormalize_name(self, name):
368
"""convert the given name to a case insensitive identifier
369
for the backend if it is an all-lowercase name.
371
this method is only used if the dialect defines
372
requires_name_normalize=True.
375
raise NotImplementedError()
377
def has_table(self, connection, table_name, schema=None):
378
"""Check the existence of a particular table in the database.
380
Given a :class:`.Connection` object and a string
381
`table_name`, return True if the given table (possibly within
382
the specified `schema`) exists in the database, False
386
raise NotImplementedError()
388
def has_sequence(self, connection, sequence_name, schema=None):
389
"""Check the existence of a particular sequence in the database.
391
Given a :class:`.Connection` object and a string
392
`sequence_name`, return True if the given sequence exists in
393
the database, False otherwise.
396
raise NotImplementedError()
398
def _get_server_version_info(self, connection):
399
"""Retrieve the server version info from the given connection.
401
This is used by the default implementation to populate the
402
"server_version_info" attribute and is called exactly
403
once upon first connect.
407
raise NotImplementedError()
409
def _get_default_schema_name(self, connection):
410
"""Return the string name of the currently selected schema from
411
the given connection.
413
This is used by the default implementation to populate the
414
"default_schema_name" attribute and is called exactly
415
once upon first connect.
419
raise NotImplementedError()
421
def do_begin(self, connection):
    """Provide an implementation of *connection.begin()*, given a
    DB-API connection.

    Abstract interface stub; concrete dialects override this.
    """

    raise NotImplementedError()
427
def do_rollback(self, connection):
    """Provide an implementation of *connection.rollback()*, given
    a DB-API connection.

    Abstract interface stub; concrete dialects override this.
    """

    raise NotImplementedError()
433
def create_xid(self):
434
"""Create a two-phase transaction ID.
436
This id will be passed to do_begin_twophase(),
437
do_rollback_twophase(), do_commit_twophase(). Its format is
441
raise NotImplementedError()
443
def do_commit(self, connection):
    """Provide an implementation of *connection.commit()*, given a
    DB-API connection.

    Abstract interface stub; concrete dialects override this.
    """

    raise NotImplementedError()
449
def do_savepoint(self, connection, name):
450
"""Create a savepoint with the given name on a SQLAlchemy
453
raise NotImplementedError()
455
def do_rollback_to_savepoint(self, connection, name):
    """Rollback a SQL Alchemy connection to the named savepoint."""

    raise NotImplementedError()
460
def do_release_savepoint(self, connection, name):
    """Release the named savepoint on a SQL Alchemy connection."""

    raise NotImplementedError()
465
def do_begin_twophase(self, connection, xid):
    """Begin a two phase transaction on the given connection."""

    raise NotImplementedError()
470
def do_prepare_twophase(self, connection, xid):
    """Prepare a two phase transaction on the given connection."""

    raise NotImplementedError()
475
def do_rollback_twophase(self, connection, xid, is_prepared=True,
477
"""Rollback a two phase transaction on the given connection."""
479
raise NotImplementedError()
481
def do_commit_twophase(self, connection, xid, is_prepared=True,
483
"""Commit a two phase transaction on the given connection."""
485
raise NotImplementedError()
487
def do_recover_twophase(self, connection):
    """Recover list of uncommitted prepared two phase transaction
    identifiers on the given connection."""

    raise NotImplementedError()
493
def do_executemany(self, cursor, statement, parameters, context=None):
494
"""Provide an implementation of ``cursor.executemany(statement,
497
raise NotImplementedError()
499
def do_execute(self, cursor, statement, parameters, context=None):
500
"""Provide an implementation of ``cursor.execute(statement,
503
raise NotImplementedError()
505
def do_execute_no_params(self, cursor, statement, parameters, context=None):
506
"""Provide an implementation of ``cursor.execute(statement)``.
508
The parameter collection should not be sent.
512
raise NotImplementedError()
514
def is_disconnect(self, e, connection, cursor):
515
"""Return True if the given DB-API error indicates an invalid
518
raise NotImplementedError()
521
"""return a callable which sets up a newly created DBAPI connection.
523
The callable accepts a single argument "conn" which is the
524
DBAPI connection itself. It has no return value.
526
This is used to set dialect-wide per-connection options such as
527
isolation modes, unicode modes, etc.
529
If a callable is returned, it will be assembled into a pool listener
530
that receives the direct DBAPI connection, with all wrappers removed.
532
If None is returned, no listener will be generated.
537
def reset_isolation_level(self, dbapi_conn):
    """Given a DBAPI connection, revert its isolation to the default."""

    raise NotImplementedError()
542
def set_isolation_level(self, dbapi_conn, level):
    """Given a DBAPI connection, set its isolation level."""

    raise NotImplementedError()
547
def get_isolation_level(self, dbapi_conn):
    """Given a DBAPI connection, return its isolation level."""

    raise NotImplementedError()
553
class ExecutionContext(object):
554
"""A messenger object for a Dialect that corresponds to a single
557
ExecutionContext should have these data members:
560
Connection object which can be freely used by default value
561
generators to execute SQL. This Connection should reference the
562
same underlying connection/transactional resources of
566
Connection object which is the source of this ExecutionContext. This
567
Connection may have close_with_result=True set, in which case it can
571
dialect which created this ExecutionContext.
574
DB-API cursor procured from the connection,
577
if passed to constructor, sqlalchemy.engine.base.Compiled object
581
string version of the statement to be executed. Is either
582
passed to the constructor, or must be created from the
583
sql.Compiled object by the time pre_exec() has completed.
586
bind parameters passed to the execute() method. For compiled
587
statements, this is a dictionary or list of dictionaries. For
588
textual statements, it should be in a format suitable for the
589
dialect's paramstyle (i.e. dict or list of dicts for non
590
positional, list or list of lists/tuples for positional).
593
True if the statement is an INSERT.
596
True if the statement is an UPDATE.
599
True if the statement is a "committable" statement.
602
a list of Column objects for which a server-side default or
603
inline SQL expression value was fired off. Applies to inserts
607
def create_cursor(self):
608
"""Return a new cursor generated from this ExecutionContext's
611
Some dialects may wish to change the behavior of
612
connection.cursor(), such as postgresql which may return a PG
613
"server side" cursor.
616
raise NotImplementedError()
619
"""Called before an execution of a compiled statement.
621
If a compiled statement was passed to this ExecutionContext,
622
the `statement` and `parameters` datamembers must be
623
initialized after this statement is complete.
626
raise NotImplementedError()
629
"""Called after the execution of a compiled statement.
631
If a compiled statement was passed to this ExecutionContext,
632
the `last_insert_ids`, `last_inserted_params`, etc.
633
datamembers should be available after this method completes.
636
raise NotImplementedError()
639
"""Return a result object corresponding to this ExecutionContext.
641
Returns a ResultProxy.
644
raise NotImplementedError()
646
def handle_dbapi_exception(self, e):
647
"""Receive a DBAPI exception which occurred upon execute, result
650
raise NotImplementedError()
652
def should_autocommit_text(self, statement):
    """Parse the given textual statement and return True if it refers to
    a "committable" statement"""

    raise NotImplementedError()
658
def lastrow_has_defaults(self):
659
"""Return True if the last INSERT or UPDATE row contained
660
inlined or database-side defaults.
663
raise NotImplementedError()
665
def get_rowcount(self):
666
"""Return the DBAPI ``cursor.rowcount`` value, or in some
667
cases an interpreted value.
669
See :attr:`.ResultProxy.rowcount` for details on this.
673
raise NotImplementedError()
676
class Compiled(object):
677
"""Represent a compiled SQL or DDL expression.
679
The ``__str__`` method of the ``Compiled`` object should produce
680
the actual text of the statement. ``Compiled`` objects are
681
specific to their underlying database dialect, and also may
682
or may not be specific to the columns referenced within a
683
particular set of bind parameters. In no case should the
684
``Compiled`` object be dependent on the actual values of those
685
bind parameters, even though it may reference those values as
689
def __init__(self, dialect, statement, bind=None):
690
"""Construct a new ``Compiled`` object.
692
:param dialect: ``Dialect`` to compile against.
694
:param statement: ``ClauseElement`` to be compiled.
696
:param bind: Optional Engine or Connection to compile this
700
self.dialect = dialect
702
if statement is not None:
703
self.statement = statement
704
self.can_execute = statement.supports_execution
705
self.string = self.process(self.statement)
707
@util.deprecated("0.7", ":class:`.Compiled` objects now compile "
708
"within the constructor.")
710
"""Produce the internal string representation of this element."""
714
def sql_compiler(self):
715
"""Return a Compiled that is capable of processing SQL expressions.
717
If this compiler is one, it would likely just return 'self'.
721
raise NotImplementedError()
723
def process(self, obj, **kwargs):
    """Dispatch ``obj`` to its compiler-visitor hook and return the result.

    ``obj._compiler_dispatch`` receives this compiler as the visitor,
    along with any additional keyword arguments.
    """
    return obj._compiler_dispatch(self, **kwargs)
727
"""Return the string text of the generated SQL or DDL."""
729
return self.string or ''
731
def construct_params(self, params=None):
732
"""Return the bind params for this compiled object.
734
:param params: a dict of string/object pairs whose values will
735
override bind values compiled in to the
739
raise NotImplementedError()
743
"""Return the bind params for this compiled object."""
744
return self.construct_params()
746
def execute(self, *multiparams, **params):
747
"""Execute this compiled object."""
751
raise exc.UnboundExecutionError(
752
"This Compiled object is not bound to any Engine "
754
return e._execute_compiled(self, multiparams, params)
756
def scalar(self, *multiparams, **params):
757
"""Execute this compiled object and return the result's
760
return self.execute(*multiparams, **params).scalar()
763
class TypeCompiler(object):
    """Produces DDL specification for TypeEngine objects."""

    def __init__(self, dialect):
        # the owning Dialect; made available to type-rendering methods
        self.dialect = dialect

    def process(self, type_):
        """Dispatch ``type_`` to its compiler-visitor hook and return the DDL string."""
        return type_._compiler_dispatch(self)
773
class Connectable(object):
774
"""Interface for an object which supports execution of SQL constructs.
776
The two implementations of :class:`.Connectable` are :class:`.Connection` and
779
Connectable must also implement the 'dialect' member which references a
780
:class:`.Dialect` instance.
784
def connect(self, **kwargs):
785
"""Return a :class:`.Connection` object.
787
Depending on context, this may be ``self`` if this object
788
is already an instance of :class:`.Connection`, or a newly
789
procured :class:`.Connection` if this object is an instance
794
def contextual_connect(self):
795
"""Return a :class:`.Connection` object which may be part of an ongoing
798
Depending on context, this may be ``self`` if this object
799
is already an instance of :class:`.Connection`, or a newly
800
procured :class:`.Connection` if this object is an instance
805
raise NotImplementedError()
807
@util.deprecated("0.7", "Use the create() method on the given schema "
                        "object directly, i.e. :meth:`.Table.create`, "
                        ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
    """Emit CREATE statements for the given schema entity."""

    raise NotImplementedError()
815
@util.deprecated("0.7", "Use the drop() method on the given schema "
                        "object directly, i.e. :meth:`.Table.drop`, "
                        ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
    """Emit DROP statements for the given schema entity."""

    raise NotImplementedError()
823
def execute(self, object, *multiparams, **params):
    """Executes the given construct and returns a :class:`.ResultProxy`."""
    # NOTE(review): the parameter name `object` shadows the builtin but is
    # part of the public interface, so it is kept unchanged.
    raise NotImplementedError()
827
def scalar(self, object, *multiparams, **params):
828
"""Executes and returns the first column of the first row.
830
The underlying cursor is closed after execution.
832
raise NotImplementedError()
834
def _run_visitor(self, visitorcallable, element,
836
raise NotImplementedError()
838
def _execute_clauseelement(self, elem, multiparams=None, params=None):
    # private execution hook; implemented by Connection and Engine
    raise NotImplementedError()
12
from __future__ import with_statement
14
from .. import exc, schema, util, log, interfaces
15
from ..sql import expression, util as sql_util
16
from .interfaces import Connectable, Compiled
17
from .util import _distill_params
842
21
class Connection(Connectable):
2557
1699
return self.pool.unique_connection()
2560
# This reconstructor is necessary so that pickles with the C extension or
2561
# without use the same Binary format.
2563
# We need a different reconstructor on the C extension so that we can
2564
# add extra checks that fields have correctly been initialized by
2566
from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
2568
# The extra function embedding is needed so that the
2569
# reconstructor function has the same signature whether or not
2570
# the extension is present.
2571
def rowproxy_reconstructor(cls, state):
2572
return safe_rowproxy_reconstructor(cls, state)
2574
def rowproxy_reconstructor(cls, state):
2575
obj = cls.__new__(cls)
2576
obj.__setstate__(state)
2580
from sqlalchemy.cresultproxy import BaseRowProxy
2582
class BaseRowProxy(object):
2583
__slots__ = ('_parent', '_row', '_processors', '_keymap')
2585
def __init__(self, parent, row, processors, keymap):
2586
"""RowProxy objects are constructed by ResultProxy objects."""
2588
self._parent = parent
2590
self._processors = processors
2591
self._keymap = keymap
2593
def __reduce__(self):
    # pickle via the module-level reconstructor so that pickles are
    # compatible whether or not the C extension is in use
    return (rowproxy_reconstructor,
            (self.__class__, self.__getstate__()))
2598
"""Return the values represented by this RowProxy as a list."""
2602
for processor, value in izip(self._processors, self._row):
2603
if processor is None:
2606
yield processor(value)
2609
return len(self._row)
2611
def __getitem__(self, key):
2613
processor, obj, index = self._keymap[key]
2615
processor, obj, index = self._parent._key_fallback(key)
2617
if isinstance(key, slice):
2619
for processor, value in izip(self._processors[key],
2621
if processor is None:
2624
l.append(processor(value))
2629
raise exc.InvalidRequestError(
2630
"Ambiguous column name '%s' in result set! "
2631
"try 'use_labels' option on select statement." % key)
2632
if processor is not None:
2633
return processor(self._row[index])
2635
return self._row[index]
2637
def __getattr__(self, name):
2641
raise AttributeError(e.args[0])
2644
class RowProxy(BaseRowProxy):
2645
"""Proxy values from a single cursor row.
2647
Mostly follows "ordered dictionary" behavior, mapping result
2648
values to the string-based column name, the integer position of
2649
the result in the row, as well as Column instances which can be
2650
mapped to the original Columns that produced this result set (for
2651
results that correspond to constructed SQL expressions).
2655
def __contains__(self, key):
    # delegate membership tests to the parent ResultMetaData
    return self._parent._has_key(self._row, key)
2658
def __getstate__(self):
2660
'_parent': self._parent,
2664
def __setstate__(self, state):
    """Restore a pickled row; processors/keymap are rebound from the parent."""
    self._parent = parent = state['_parent']
    self._row = state['_row']
    # processors and keymap are not pickled; take them from the parent
    # ResultMetaData so lookups keep working after unpickling
    self._processors = parent._processors
    self._keymap = parent._keymap
2672
def __eq__(self, other):
    # identity short-circuit avoids materializing tuple(self)
    return other is self or other == tuple(self)
2675
def __ne__(self, other):
    # defined explicitly: Python 2 does not derive != from ==
    return not self.__eq__(other)
2679
return repr(tuple(self))
2681
def has_key(self, key):
    """Return True if this RowProxy contains the given key."""

    # delegate to the parent ResultMetaData, which owns the key mapping
    return self._parent._has_key(self._row, key)
2687
"""Return a list of tuples, each tuple containing a key/value pair."""
2688
# TODO: no coverage here
2689
return [(key, self[key]) for key in self.iterkeys()]
2692
"""Return the list of keys as strings represented by this RowProxy."""
2694
return self._parent.keys
2697
return iter(self._parent.keys)
2699
def itervalues(self):
2703
# Register RowProxy with Sequence,
2704
# so sequence protocol is implemented
2705
from collections import Sequence
2706
Sequence.register(RowProxy)
2711
class ResultMetaData(object):
2712
"""Handle cursor.description, applying additional info from an execution
2715
def __init__(self, parent, metadata):
2716
self._processors = processors = []
2718
# We do not strictly need to store the processor in the key mapping,
2719
# though it is faster in the Python version (probably because of the
2720
# saved attribute lookup self._processors)
2721
self._keymap = keymap = {}
2723
context = parent.context
2724
dialect = context.dialect
2725
typemap = dialect.dbapi_type_map
2726
translate_colname = context._translate_colname
2728
# high precedence key values.
2731
for i, rec in enumerate(metadata):
2735
if dialect.description_encoding:
2736
colname = dialect._description_decoder(colname)
2738
if translate_colname:
2739
colname, untranslated = translate_colname(colname)
2741
if context.result_map:
2743
name, obj, type_ = context.result_map[colname.lower()]
2745
name, obj, type_ = \
2746
colname, None, typemap.get(coltype, types.NULLTYPE)
2748
name, obj, type_ = \
2749
colname, None, typemap.get(coltype, types.NULLTYPE)
2751
processor = type_._cached_result_processor(dialect, coltype)
2753
processors.append(processor)
2754
rec = (processor, obj, i)
2756
# indexes as keys. This is only needed for the Python version of
2757
# RowProxy (the C version uses a faster path for integer indexes).
2758
primary_keymap[i] = rec
2760
# populate primary keymap, looking for conflicts.
2761
if primary_keymap.setdefault(name.lower(), rec) is not rec:
2762
# place a record that doesn't have the "index" - this
2763
# is interpreted later as an AmbiguousColumnError,
2764
# but only when actually accessed. Columns
2765
# colliding by name is not a problem if those names
2766
# aren't used; integer and ColumnElement access is always
2768
primary_keymap[name.lower()] = (processor, obj, None)
2770
if dialect.requires_name_normalize:
2771
colname = dialect.normalize_name(colname)
2773
self.keys.append(colname)
2778
if translate_colname and \
2780
keymap[untranslated] = rec
2782
# overwrite keymap values with those of the
2783
# high precedence keymap.
2784
keymap.update(primary_keymap)
2787
context.engine.logger.debug(
2788
"Col %r", tuple(x[0] for x in metadata))
2790
@util.pending_deprecation("0.8", "sqlite dialect uses "
2791
"_translate_colname() now")
2792
def _set_keymap_synonym(self, name, origname):
2793
"""Set a synonym for the given name.
2795
Some dialects (SQLite at the moment) may use this to
2796
adjust the column names that are significant within a
2800
rec = (processor, obj, i) = self._keymap[origname.lower()]
2801
if self._keymap.setdefault(name, rec) is not rec:
2802
self._keymap[name] = (processor, obj, None)
2804
def _key_fallback(self, key, raiseerr=True):
2807
if isinstance(key, basestring):
2808
result = map.get(key.lower())
2809
# fallback for targeting a ColumnElement to a textual expression
2810
# this is a rare use case which only occurs when matching text()
2811
# or colummn('name') constructs to ColumnElements, or after a
2812
# pickle/unpickle roundtrip
2813
elif isinstance(key, expression.ColumnElement):
2814
if key._label and key._label.lower() in map:
2815
result = map[key._label.lower()]
2816
elif hasattr(key, 'name') and key.name.lower() in map:
2817
# match is only on name.
2818
result = map[key.name.lower()]
2819
# search extra hard to make sure this
2820
# isn't a column/label name overlap.
2821
# this check isn't currently available if the row
2823
if result is not None and \
2824
result[1] is not None:
2825
for obj in result[1]:
2826
if key._compare_name_for_result(obj):
2832
raise exc.NoSuchColumnError(
2833
"Could not locate column in row for column '%s'" %
2834
expression._string_or_unprintable(key))
2841
def _has_key(self, row, key):
2842
if key in self._keymap:
2845
return self._key_fallback(key, False) is not None
2847
def __getstate__(self):
2849
'_pickled_keymap': dict(
2851
for key, (processor, obj, index) in self._keymap.iteritems()
2852
if isinstance(key, (basestring, int))
2857
def __setstate__(self, state):
2858
# the row has been processed at pickling time so we don't need any
2860
self._processors = [None for _ in xrange(len(state['keys']))]
2861
self._keymap = keymap = {}
2862
for key, index in state['_pickled_keymap'].iteritems():
2863
# not preserving "obj" here, unfortunately our
2864
# proxy comparison fails with the unpickle
2865
keymap[key] = (None, None, index)
2866
self.keys = state['keys']
2870
class ResultProxy(object):
2871
"""Wraps a DB-API cursor object to provide easier access to row columns.
2873
Individual columns may be accessed by their integer position,
2874
case-insensitive column name, or by ``schema.Column``
2879
col1 = row[0] # access via integer position
2881
col2 = row['col2'] # access via name
2883
col3 = row[mytable.c.mycol] # access via Column object.
2885
``ResultProxy`` also handles post-processing of result column
2886
data using ``TypeEngine`` objects, which are referenced from
2887
the originating SQL statement that produced this result set.
2891
_process_row = RowProxy
2892
out_parameters = None
2893
_can_close_connection = False
2895
def __init__(self, context):
2896
self.context = context
2897
self.dialect = context.dialect
2899
self.cursor = self._saved_cursor = context.cursor
2900
self.connection = context.root_connection
2901
self._echo = self.connection._echo and \
2902
context.engine._should_log_debug()
2903
self._init_metadata()
2905
def _init_metadata(self):
2906
metadata = self._cursor_description()
2907
if metadata is None:
2908
self._metadata = None
2910
self._metadata = ResultMetaData(self, metadata)
2913
"""Return the current set of string keys for rows."""
2915
return self._metadata.keys
2919
@util.memoized_property
2921
"""Return the 'rowcount' for this result.
2923
The 'rowcount' reports the number of rows *matched*
2924
by the WHERE criterion of an UPDATE or DELETE statement.
2928
Notes regarding :attr:`.ResultProxy.rowcount`:
2931
* This attribute returns the number of rows *matched*,
2932
which is not necessarily the same as the number of rows
2933
that were actually *modified* - an UPDATE statement, for example,
2934
may have no net change on a given row if the SET values
2935
given are the same as those present in the row already.
2936
Such a row would be matched but not modified.
2937
On backends that feature both styles, such as MySQL,
2938
rowcount is configured by default to return the match
2941
* :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
2942
with an UPDATE or DELETE statement. Contrary to what the Python
2943
DBAPI says, it does *not* return the
2944
number of rows available from the results of a SELECT statement
2945
as DBAPIs cannot support this functionality when rows are
2948
* :attr:`.ResultProxy.rowcount` may not be fully implemented by
2949
all dialects. In particular, most DBAPIs do not support an
2950
aggregate rowcount result from an executemany call.
2951
The :meth:`.ResultProxy.supports_sane_rowcount` and
2952
:meth:`.ResultProxy.supports_sane_multi_rowcount` methods
2953
will report from the dialect if each usage is known to be
2956
* Statements that use RETURNING may not return a correct
2961
return self.context.rowcount
2962
except Exception, e:
2963
self.connection._handle_dbapi_exception(
2964
e, None, None, self.cursor, self.context)
2968
def lastrowid(self):
2969
"""return the 'lastrowid' accessor on the DBAPI cursor.
2971
This is a DBAPI specific method and is only functional
2972
for those backends which support it, for statements
2973
where it is appropriate. It's behavior is not
2974
consistent across backends.
2976
Usage of this method is normally unnecessary; the
2977
:attr:`~ResultProxy.inserted_primary_key` attribute provides a
2978
tuple of primary key values for a newly inserted row,
2979
regardless of database backend.
2983
return self._saved_cursor.lastrowid
2984
except Exception, e:
2985
self.connection._handle_dbapi_exception(
2987
self._saved_cursor, self.context)
2991
def returns_rows(self):
2992
"""True if this :class:`.ResultProxy` returns rows.
2994
I.e. if it is legal to call the methods
2995
:meth:`~.ResultProxy.fetchone`,
2996
:meth:`~.ResultProxy.fetchmany`
2997
:meth:`~.ResultProxy.fetchall`.
3000
return self._metadata is not None
3003
def is_insert(self):
3004
"""True if this :class:`.ResultProxy` is the result
3005
of a executing an expression language compiled
3006
:func:`.expression.insert` construct.
3008
When True, this implies that the
3009
:attr:`inserted_primary_key` attribute is accessible,
3010
assuming the statement did not include
3011
a user defined "returning" construct.
3014
return self.context.isinsert
3016
def _cursor_description(self):
3017
"""May be overridden by subclasses."""
3019
return self._saved_cursor.description
3021
def close(self, _autoclose_connection=True):
3022
"""Close this ResultProxy.
3024
Closes the underlying DBAPI cursor corresponding to the execution.
3026
Note that any data cached within this ResultProxy is still available.
3027
For some types of results, this may include buffered rows.
3029
If this ResultProxy was generated from an implicit execution,
3030
the underlying Connection will also be closed (returns the
3031
underlying DBAPI connection to the connection pool.)
3033
This method is called automatically when:
3035
* all result rows are exhausted using the fetchXXX() methods.
3036
* cursor.description is None.
3042
self.connection._safe_close_cursor(self.cursor)
3043
if _autoclose_connection and \
3044
self.connection.should_close_with_result:
3045
self.connection.close()
3046
# allow consistent errors
3051
row = self.fetchone()
3057
@util.memoized_property
3058
def inserted_primary_key(self):
3059
"""Return the primary key for the row just inserted.
3061
The return value is a list of scalar values
3062
corresponding to the list of primary key columns
3063
in the target table.
3065
This only applies to single row :func:`.insert`
3066
constructs which did not explicitly specify
3067
:meth:`.Insert.returning`.
3069
Note that primary key columns which specify a
3070
server_default clause,
3071
or otherwise do not qualify as "autoincrement"
3072
columns (see the notes at :class:`.Column`), and were
3073
generated using the database-side default, will
3074
appear in this list as ``None`` unless the backend
3075
supports "returning" and the insert statement executed
3076
with the "implicit returning" enabled.
3080
if not self.context.isinsert:
3081
raise exc.InvalidRequestError(
3082
"Statement is not an insert() expression construct.")
3083
elif self.context._is_explicit_returning:
3084
raise exc.InvalidRequestError(
3085
"Can't call inserted_primary_key when returning() "
3088
return self.context.inserted_primary_key
3090
@util.deprecated("0.6", "Use :attr:`.ResultProxy.inserted_primary_key`")
3091
def last_inserted_ids(self):
3092
"""Return the primary key for the row just inserted."""
3094
return self.inserted_primary_key
3096
def last_updated_params(self):
3097
"""Return the collection of updated parameters from this
3101
if self.context.executemany:
3102
return self.context.compiled_parameters
3104
return self.context.compiled_parameters[0]
3106
def last_inserted_params(self):
3107
"""Return the collection of inserted parameters from this
3111
if self.context.executemany:
3112
return self.context.compiled_parameters
3114
return self.context.compiled_parameters[0]
3116
def lastrow_has_defaults(self):
3117
"""Return ``lastrow_has_defaults()`` from the underlying
3120
See ExecutionContext for details.
3123
return self.context.lastrow_has_defaults()
3125
def postfetch_cols(self):
3126
"""Return ``postfetch_cols()`` from the underlying ExecutionContext.
3128
See ExecutionContext for details.
3131
return self.context.postfetch_cols
3133
def prefetch_cols(self):
3134
return self.context.prefetch_cols
3136
def supports_sane_rowcount(self):
3137
"""Return ``supports_sane_rowcount`` from the dialect.
3139
See :attr:`.ResultProxy.rowcount` for background.
3143
return self.dialect.supports_sane_rowcount
3145
def supports_sane_multi_rowcount(self):
3146
"""Return ``supports_sane_multi_rowcount`` from the dialect.
3148
See :attr:`.ResultProxy.rowcount` for background.
3152
return self.dialect.supports_sane_multi_rowcount
3154
def _fetchone_impl(self):
3156
return self.cursor.fetchone()
3157
except AttributeError:
3160
def _fetchmany_impl(self, size=None):
3163
return self.cursor.fetchmany()
3165
return self.cursor.fetchmany(size)
3166
except AttributeError:
3169
def _fetchall_impl(self):
3171
return self.cursor.fetchall()
3172
except AttributeError:
3175
def _non_result(self):
3176
if self._metadata is None:
3177
raise exc.ResourceClosedError(
3178
"This result object does not return rows. "
3179
"It has been closed automatically.",
3182
raise exc.ResourceClosedError("This result object is closed.")
3184
def process_rows(self, rows):
3185
process_row = self._process_row
3186
metadata = self._metadata
3187
keymap = metadata._keymap
3188
processors = metadata._processors
3190
log = self.context.engine.logger.debug
3194
l.append(process_row(metadata, row, processors, keymap))
3197
return [process_row(metadata, row, processors, keymap)
3201
"""Fetch all rows, just like DB-API ``cursor.fetchall()``."""
3204
l = self.process_rows(self._fetchall_impl())
3207
except Exception, e:
3208
self.connection._handle_dbapi_exception(
3210
self.cursor, self.context)
3213
def fetchmany(self, size=None):
3214
"""Fetch many rows, just like DB-API
3215
``cursor.fetchmany(size=cursor.arraysize)``.
3217
If rows are present, the cursor remains open after this is called.
3218
Else the cursor is automatically closed and an empty list is returned.
3223
l = self.process_rows(self._fetchmany_impl(size))
3227
except Exception, e:
3228
self.connection._handle_dbapi_exception(
3230
self.cursor, self.context)
3234
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
3236
If a row is present, the cursor remains open after this is called.
3237
Else the cursor is automatically closed and None is returned.
3241
row = self._fetchone_impl()
3243
return self.process_rows([row])[0]
3247
except Exception, e:
3248
self.connection._handle_dbapi_exception(
3250
self.cursor, self.context)
3254
"""Fetch the first row and then close the result set unconditionally.
3256
Returns None if no row is present.
3259
if self._metadata is None:
3263
row = self._fetchone_impl()
3264
except Exception, e:
3265
self.connection._handle_dbapi_exception(
3267
self.cursor, self.context)
3272
return self.process_rows([row])[0]
3279
"""Fetch the first column of the first row, and close the result set.
3281
Returns None if no row is present.
3290
class BufferedRowResultProxy(ResultProxy):
    """A ResultProxy with row buffering behavior.

    ``ResultProxy`` that buffers the contents of a selection of rows
    before ``fetchone()`` is called.  This is to allow the results of
    ``cursor.description`` to be available immediately, when
    interfacing with a DB-API that requires rows to be consumed before
    this information is available (currently psycopg2, when used with
    server-side cursors).

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 100.
    """

    def _init_metadata(self):
        # buffer rows BEFORE reading cursor.description -- see class
        # docstring for why.
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()

    # this is a "growth chart" for the buffering of rows.
    # each successive __buffer_rows call will use the next
    # value in the list for the buffer size until the max
    # is reached
    size_growth = {
        1: 5,
        5: 10,
        10: 20,
        20: 50,
        50: 100,
        100: 250,
        250: 500,
        500: 1000
    }

    def __buffer_rows(self):
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)

    def _fetchone_impl(self):
        if self.closed:
            return None
        if not self.__rowbuffer:
            self.__buffer_rows()
            if not self.__rowbuffer:
                return None
        return self.__rowbuffer.popleft()

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        self.__rowbuffer.extend(self.cursor.fetchall())
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret
class FullyBufferedResultProxy(ResultProxy):
    """A result proxy that buffers rows fully upon creation.

    Used for operations where a result is to be delivered
    after the database conversation can not be continued,
    such as MSSQL INSERT...OUTPUT after an autocommit.

    """
    def _init_metadata(self):
        super(FullyBufferedResultProxy, self)._init_metadata()
        self.__rowbuffer = self._buffer_rows()

    def _buffer_rows(self):
        # drain the entire cursor immediately into a deque
        return collections.deque(self.cursor.fetchall())

    def _fetchone_impl(self):
        if self.__rowbuffer:
            return self.__rowbuffer.popleft()
        else:
            return None

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret
class BufferedColumnRow(RowProxy):
    """RowProxy subclass that applies all type processors eagerly at
    construction time, so column values are materialized before the
    underlying DBAPI row goes out of scope."""

    def __init__(self, parent, row, processors, keymap):
        # preprocess row
        row = list(row)
        # this is a tad faster than using enumerate
        index = 0
        for processor in parent._orig_processors:
            if processor is not None:
                row[index] = processor(row[index])
            index += 1
        row = tuple(row)
        super(BufferedColumnRow, self).__init__(parent, row,
                                                processors, keymap)
class BufferedColumnResultProxy(ResultProxy):
    """A ResultProxy with column buffering behavior.

    ``ResultProxy`` that loads all columns into memory each time
    fetchone() is called.  If fetchmany() or fetchall() are called,
    the full grid of results is fetched.  This is to operate with
    databases where result rows contain "live" results that fall out
    of scope unless explicitly fetched.  Currently this includes
    cx_Oracle LOB objects.

    """

    _process_row = BufferedColumnRow

    def _init_metadata(self):
        super(BufferedColumnResultProxy, self)._init_metadata()
        metadata = self._metadata
        # orig_processors will be used to preprocess each row when they are
        # constructed.
        metadata._orig_processors = metadata._processors
        # replace the all type processors by None processors.
        metadata._processors = [None for _ in xrange(len(metadata.keys))]
        keymap = {}
        for k, (func, obj, index) in metadata._keymap.iteritems():
            keymap[k] = (None, obj, index)
        self._metadata._keymap = keymap

    def fetchall(self):
        # can't call cursor.fetchall(), since rows must be
        # fully processed before requesting more from the DBAPI.
        l = []
        while True:
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l

    def fetchmany(self, size=None):
        # can't call cursor.fetchmany(), since rows must be
        # fully processed before requesting more from the DBAPI.
        if size is None:
            return self.fetchall()
        l = []
        for i in xrange(size):
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l
def connection_memoize(key):
    """Decorator, memoize a function in a connection.info stash.

    Only applicable to functions which take no arguments other than a
    connection.  The memo will be stored in ``connection.info[key]``.
    """

    @util.decorator
    def decorated(fn, self, connection):
        # connect() ensures we have a Connection (a no-op on an
        # already-connected Connection)
        connection = connection.connect()
        try:
            return connection.info[key]
        except KeyError:
            connection.info[key] = val = fn(self, connection)
            return val

    return decorated
class OptionEngine(Engine):
    """An Engine proxy created by :meth:`.Engine.execution_options`,
    sharing the proxied engine's pool/URL/dialect while carrying its
    own execution options and event state."""

    def __init__(self, proxied, execution_options):
        self._proxied = proxied
        self.url = proxied.url
        self.dialect = proxied.dialect
        self.logging_name = proxied.logging_name
        self.echo = proxied.echo
        log.instance_logger(self, echoflag=self.echo)
        self.dispatch = self.dispatch._join(proxied.dispatch)
        self._execution_options = proxied._execution_options
        self.update_execution_options(**execution_options)

    def _get_pool(self):
        # pool is always delegated to the proxied engine
        return self._proxied.pool

    def _set_pool(self, pool):
        self._proxied.pool = pool

    pool = property(_get_pool, _set_pool)

    def _get_has_events(self):
        # events registered on either the proxy or the proxied engine count
        return self._proxied._has_events or \
            self.__dict__.get('_has_events', False)

    def _set_has_events(self, value):
        self.__dict__['_has_events'] = value

    _has_events = property(_get_has_events, _set_has_events)