648
class UnicodeTest(fixtures.TestBase, AssertsExecutionResults):
649
"""tests the Unicode type. also tests the TypeDecorator with instances in the types package."""
652
def setup_class(cls):
    """Create the shared ``unicode_table`` used by every test in this class."""
    global unicode_table, metadata
    metadata = MetaData(testing.db)
    # NOTE(review): the closing paren of this Table() call was missing in
    # the source (a line was dropped); restored so the call is balanced.
    unicode_table = Table('unicode_table', metadata,
        Column('id', Integer, Sequence('uni_id_seq', optional=True),
               primary_key=True),
        Column('unicode_varchar', Unicode(250)),
        Column('unicode_text', UnicodeText),
        )
    metadata.create_all()
663
def teardown_class(cls):
    """Remove test rows and drop the schema created in setup_class."""
    unicode_table.delete().execute()
    # setup_class created the table via metadata.create_all(); without a
    # matching drop_all() the table leaks into later test classes.
    metadata.drop_all()
666
class UnicodeTest(fixtures.TestBase):
667
"""Exercise the Unicode and related types.
669
Note: unicode round trip tests are now in
670
sqlalchemy/testing/suite/test_types.py.
670
674
def test_native_unicode(self):
671
675
"""assert expected values for 'native unicode' mode"""
674
(testing.against('mssql+pyodbc') and not testing.db.dialect.freetds):
675
assert testing.db.dialect.returns_unicode_strings == 'conditional'
678
if testing.against('mssql+pymssql'):
679
assert testing.db.dialect.returns_unicode_strings == ('charset' in testing.db.url.query)
682
assert testing.db.dialect.returns_unicode_strings == \
683
((testing.db.name, testing.db.driver) in \
685
('postgresql','psycopg2'),
686
('postgresql','pypostgresql'),
687
('postgresql','pg8000'),
688
('postgresql','zxjdbc'),
691
('mysql','mysqlconnector'),
693
('sqlite','pysqlite'),
695
('oracle','cx_oracle'),
697
"name: %s driver %s returns_unicode_strings=%s" % \
700
testing.db.dialect.returns_unicode_strings)
702
def test_round_trip(self):
    """Insert one row of non-ASCII data and verify it comes back intact
    through both the Unicode and UnicodeText columns.
    """
    data = (
        u"Alors vous imaginez ma surprise, au lever du jour, "
        u"quand une drôle de petite voix m’a réveillé. Elle "
        u"disait: « S’il vous plaît… dessine-moi un mouton! »"
    )

    unicode_table.insert().execute(
        unicode_varchar=data, unicode_text=data)

    row = unicode_table.select().execute().first()
    assert isinstance(row['unicode_varchar'], unicode)
    assert isinstance(row['unicode_text'], unicode)
    eq_(row['unicode_varchar'], data)
    eq_(row['unicode_text'], data)
715
def test_round_trip_executemany(self):
    """Round-trip unicode data through an executemany()-style insert.

    cx_oracle was producing different behavior for cursor.executemany()
    vs. cursor.execute(), so this exercises the multi-params code path.
    """
    unicodedata = (
        u"Alors vous imaginez ma surprise, au lever du jour, quand "
        u"une drôle de petite voix m’a réveillé. "
        u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
    )

    # NOTE(review): the closing paren of this execute() call was missing
    # in the source (a line was dropped); restored.
    unicode_table.insert().execute(
        dict(unicode_varchar=unicodedata, unicode_text=unicodedata),
        dict(unicode_varchar=unicodedata, unicode_text=unicodedata),
    )

    x = unicode_table.select().execute().first()
    assert isinstance(x['unicode_varchar'], unicode)
    eq_(x['unicode_varchar'], unicodedata)
    assert isinstance(x['unicode_text'], unicode)
    eq_(x['unicode_text'], unicodedata)
734
def test_union(self):
735
"""ensure compiler processing works for UNIONs"""
737
unicodedata = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
738
u"une drôle de petite voix m’a réveillé. "\
739
u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
741
unicode_table.insert().execute(unicode_varchar=unicodedata,unicode_text=unicodedata)
744
select([unicode_table.c.unicode_varchar]),
745
select([unicode_table.c.unicode_varchar])
748
assert isinstance(x['unicode_varchar'], unicode)
749
eq_(x['unicode_varchar'], unicodedata)
751
@testing.fails_on('oracle', 'oracle converts empty strings to a blank space')
def test_blank_strings(self):
    """An empty unicode string must round-trip as exactly u''."""
    unicode_table.insert().execute(unicode_varchar=u'')
    fetched = select([unicode_table.c.unicode_varchar]).scalar()
    assert fetched == u''
756
def test_unicode_warnings(self):
757
"""test the warnings raised when SQLA must coerce unicode binds,
758
*and* is using the Unicode type.
762
unicodedata = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
763
u"une drôle de petite voix m’a réveillé. "\
764
u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
766
# using Unicode explicitly - warning should be emitted
768
uni = u.dialect_impl(testing.db.dialect).bind_processor(testing.db.dialect)
769
if testing.db.dialect.supports_unicode_binds:
771
#assert_raises(exc.SAWarning, uni, b'x')
772
#assert isinstance(uni(unicodedata), str)
774
assert_raises(exc.SAWarning, uni, 'x')
775
assert isinstance(uni(unicodedata), unicode)
778
eq_(uni(unicodedata), unicodedata)
677
if (testing.against('mssql+pyodbc') and
678
not testing.db.dialect.freetds) \
679
or testing.against('mssql+mxodbc'):
681
testing.db.dialect.returns_unicode_strings,
685
elif testing.against('mssql+pymssql'):
687
testing.db.dialect.returns_unicode_strings,
688
('charset' in testing.db.url.query)
691
elif testing.against('mysql+cymysql', 'mysql+pymssql'):
693
testing.db.dialect.returns_unicode_strings,
694
True if util.py3k else False
781
#assert_raises(exc.SAWarning, uni, b'x')
782
#assert isinstance(uni(unicodedata), bytes)
784
assert_raises(exc.SAWarning, uni, 'x')
785
assert isinstance(uni(unicodedata), str)
788
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
790
# using convert unicode at engine level -
791
# this should not be raising a warning
792
unicode_engine = engines.utf8_engine(options={'convert_unicode':True,})
793
unicode_engine.dialect.supports_unicode_binds = False
699
expected = (testing.db.name, testing.db.driver) in \
701
('postgresql', 'psycopg2'),
702
('postgresql', 'pypostgresql'),
703
('postgresql', 'pg8000'),
704
('postgresql', 'zxjdbc'),
707
('mysql', 'mysqlconnector'),
708
('sqlite', 'pysqlite'),
709
('oracle', 'zxjdbc'),
710
('oracle', 'cx_oracle'),
714
testing.db.dialect.returns_unicode_strings,
718
data = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
719
u"une drôle de petite voix m’a réveillé. "\
720
u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
722
def test_unicode_warnings_typelevel_native_unicode(self):
724
unicodedata = self.data
726
dialect = default.DefaultDialect()
727
dialect.supports_unicode_binds = True
728
uni = u.dialect_impl(dialect).bind_processor(dialect)
730
#assert_raises(exc.SAWarning, uni, b'x')
731
#assert isinstance(uni(unicodedata), str)
733
assert_raises(exc.SAWarning, uni, 'x')
734
assert isinstance(uni(unicodedata), unicode)
737
def test_unicode_warnings_typelevel_sqla_unicode(self):
738
unicodedata = self.data
740
dialect = default.DefaultDialect()
741
dialect.supports_unicode_binds = False
742
uni = u.dialect_impl(dialect).bind_processor(dialect)
744
#assert_raises(exc.SAWarning, uni, b'x')
745
#assert isinstance(uni(unicodedata), bytes)
747
assert_raises(exc.SAWarning, uni, 'x')
748
assert isinstance(uni(unicodedata), str)
751
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
753
def test_unicode_warnings_dialectlevel(self):
755
unicodedata = self.data
757
dialect = default.DefaultDialect(convert_unicode=True)
758
dialect.supports_unicode_binds = False
796
uni = s.dialect_impl(unicode_engine.dialect).bind_processor(unicode_engine.dialect)
761
uni = s.dialect_impl(dialect).bind_processor(dialect)
797
762
# this is not the unicode type - no warning
806
771
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
810
# lambda: testing.db_spec("postgresql+pg8000")(testing.db),
811
# "pg8000 appropriately does not accept 'bytes' for a VARCHAR column."
813
773
def test_ignoring_unicode_error(self):
814
"""checks String(unicode_error='ignore') is passed to underlying codec."""
816
unicodedata = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
817
u"une drôle de petite voix m’a réveillé. "\
818
u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
820
asciidata = unicodedata.encode('ascii', 'ignore')
823
table = Table('unicode_err_table', m,
824
Column('sort', Integer),
825
Column('plain_varchar_no_coding_error', \
826
String(248, convert_unicode='force', unicode_error='ignore'))
830
utf8_table = Table('unicode_err_table', m2,
831
Column('sort', Integer),
832
Column('plain_varchar_no_coding_error', \
833
String(248, convert_unicode=True))
836
engine = engines.testing_engine(options={'encoding':'ascii'})
839
# insert a row that should be ascii and
840
# coerce from unicode with ignore on the bind side
844
plain_varchar_no_coding_error=unicodedata
848
engine.dialect.encoding = 'utf-8'
849
from binascii import hexlify
851
# the row that we put in was stored as hexlified ascii
852
row = engine.execute(utf8_table.select()).first()
853
x = row['plain_varchar_no_coding_error']
854
connect_opts = engine.dialect.create_connect_args(testing.db.url)[1]
855
if isinstance(x, unicode):
856
x = x.encode('utf-8')
858
b = hexlify(asciidata)
861
# insert another row which will be stored with
866
plain_varchar_no_coding_error=unicodedata
869
# switch back to ascii
870
engine.dialect.encoding = 'ascii'
872
# one row will be ascii with ignores,
873
# the other will be either ascii with the ignores
874
# or just the straight unicode+ utf8 value if the
875
# dialect just returns unicode
876
result = engine.execute(table.select().order_by(table.c.sort))
877
ascii_row = result.fetchone()
878
utf8_row = result.fetchone()
881
x = ascii_row['plain_varchar_no_coding_error']
882
# on python3 "x" comes back as string (i.e. unicode),
883
# hexlify requires bytes
884
a = hexlify(x.encode('utf-8'))
885
b = hexlify(asciidata)
888
x = utf8_row['plain_varchar_no_coding_error']
889
if testing.against('mssql+pyodbc') and not testing.db.dialect.freetds:
890
# TODO: no clue what this is
893
u'Alors vous imaginez ma surprise, au lever du jour, quand une '
894
u'drle de petite voix ma rveill. Elle disait: Sil vous plat '
895
u'dessine-moi un mouton! '
897
elif engine.dialect.returns_unicode_strings:
774
"""checks String(unicode_error='ignore') is passed to
777
unicodedata = self.data
779
type_ = String(248, convert_unicode='force', unicode_error='ignore')
780
dialect = default.DefaultDialect(encoding='ascii')
781
proc = type_.result_processor(dialect, 10)
783
utfdata = unicodedata.encode('utf8')
786
unicodedata.encode('ascii', 'ignore').decode()
907
790
class EnumTest(fixtures.TestBase):
1381
1298
assert test_table.c.data.distinct().type == test_table.c.data.type
1383
1300
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
1384
def test_default_compile(self):
1385
"""test that the base dialect of the type object is used
1386
for default compilation.
1390
for type_, expected in (
1391
(String(), "VARCHAR"),
1392
(Integer(), "INTEGER"),
1393
(dialects.postgresql.INET(), "INET"),
1394
(dialects.postgresql.FLOAT(), "FLOAT"),
1395
(dialects.mysql.REAL(precision=8, scale=2), "REAL(8, 2)"),
1396
(dialects.postgresql.REAL(), "REAL"),
1397
(INTEGER(), "INTEGER"),
1398
(dialects.mysql.INTEGER(display_width=5), "INTEGER(5)")
1400
self.assert_compile(type_, expected,
1401
allow_dialect_select=True)
1403
class DateTest(fixtures.TestBase, AssertsExecutionResults):
1405
def setup_class(cls):
1406
global users_with_date, insert_data
1409
if testing.against('oracle'):
1412
datetime.datetime(2005, 11, 10, 0, 0),
1413
datetime.date(2005,11,10),
1414
datetime.datetime(2005, 11, 10, 0, 0, 0, 29384)),
1416
datetime.datetime(2005, 11, 10, 11, 52, 35),
1417
datetime.date(2005,10,10),
1418
datetime.datetime(2006, 5, 10, 15, 32, 47, 6754)),
1420
datetime.datetime(2006, 11, 10, 11, 52, 35),
1421
datetime.date(1970,4,1),
1422
datetime.datetime(2004, 9, 18, 4, 0, 52, 1043)),
1423
(10, 'colber', None, None, None),
1425
fnames = ['user_id', 'user_name', 'user_datetime',
1426
'user_date', 'user_time']
1428
collist = [Column('user_id', INT, primary_key=True),
1429
Column('user_name', VARCHAR(20)),
1430
Column('user_datetime', DateTime),
1431
Column('user_date', Date),
1432
Column('user_time', TIMESTAMP)]
1434
datetime_micro = 54839
1437
# Missing or poor microsecond support:
1438
if testing.against('mssql', 'mysql', 'firebird', '+zxjdbc'):
1439
datetime_micro, time_micro = 0, 0
1440
# No microseconds for TIME
1441
elif testing.against('maxdb'):
1446
datetime.datetime(2005, 11, 10, 0, 0),
1447
datetime.date(2005, 11, 10),
1448
datetime.time(12, 20, 2)),
1450
datetime.datetime(2005, 11, 10, 11, 52, 35),
1451
datetime.date(2005, 10, 10),
1452
datetime.time(0, 0, 0)),
1454
datetime.datetime(2005, 11, 10, 11, 52, 35, datetime_micro),
1455
datetime.date(1970, 4, 1),
1456
datetime.time(23, 59, 59, time_micro)),
1457
(10, 'colber', None, None, None),
1461
fnames = ['user_id', 'user_name', 'user_datetime',
1462
'user_date', 'user_time']
1464
collist = [Column('user_id', INT, primary_key=True),
1465
Column('user_name', VARCHAR(20)),
1466
Column('user_datetime', DateTime(timezone=False)),
1467
Column('user_date', Date),
1468
Column('user_time', Time)]
1470
if testing.against('sqlite', 'postgresql'):
1473
datetime.datetime(1850, 11, 10, 11, 52, 35, datetime_micro),
1474
datetime.date(1727,4,1),
1478
users_with_date = Table('query_users_with_date',
1479
MetaData(testing.db), *collist)
1480
users_with_date.create()
1481
insert_dicts = [dict(zip(fnames, d)) for d in insert_data]
1483
for idict in insert_dicts:
1484
users_with_date.insert().execute(**idict)
1487
def teardown_class(cls):
    """Drop the table created in setup_class."""
    users_with_date.drop()
1494
users_with_date.select().order_by(users_with_date.c.user_id).execute().fetchall())
1495
self.assert_(l == insert_data,
1496
'DateTest mismatch: got:%s expected:%s' % (l, insert_data))
1498
def testtextdate(self):
    """Exercise DateTime coercion through textual SQL: once via a
    result-column typemap, once via a typed bind parameter.
    """
    rows = testing.db.execute(text(
        "select user_datetime from query_users_with_date",
        typemap={'user_datetime': DateTime})).fetchall()

    self.assert_(isinstance(rows[0][0], datetime.datetime))

    rows = testing.db.execute(
        text(
            "select * from query_users_with_date where user_datetime=:somedate",
            bindparams=[bindparam('somedate', type_=types.DateTime)]),
        somedate=datetime.datetime(2005, 11, 10, 11, 52, 35)).fetchall()
1510
def testdate2(self):
    """Round-trip date vs. datetime values, verifying each column coerces
    back to its own Python type, including a mismatched date/datetime row.
    """
    meta = MetaData(testing.db)
    # NOTE(review): the ``primary_key=True),`` continuation line of the
    # ``id`` column was dropped in the source, leaving the Table() call
    # unbalanced; restored to match the id-column pattern used by
    # unicode_table earlier in this file.
    t = Table('testdate', meta,
        Column('id', Integer,
            Sequence('datetest_id_seq', optional=True),
            primary_key=True),
        Column('adate', Date), Column('adatetime', DateTime))
    t.create(checkfirst=True)
    try:
        d1 = datetime.date(2007, 10, 30)
        t.insert().execute(adate=d1, adatetime=d1)
        d2 = datetime.datetime(2007, 10, 30)
        t.insert().execute(adate=d2, adatetime=d2)

        x = t.select().execute().fetchall()[0]
        eq_(x.adate.__class__, datetime.date)
        eq_(x.adatetime.__class__, datetime.datetime)

        t.delete().execute()

        # test mismatched date/datetime
        t.insert().execute(adate=d2, adatetime=d2)
        # executed twice in the original — presumably to exercise
        # statement re-execution; kept as-is (TODO confirm intent).
        eq_(select([t.c.adate, t.c.adatetime], t.c.adate == d1).execute().fetchall(), [(d1, d2)])
        eq_(select([t.c.adate, t.c.adatetime], t.c.adate == d1).execute().fetchall(), [(d1, d2)])
    finally:
        # drop even if an assertion above fails, so the table does not
        # leak into subsequent tests
        t.drop(checkfirst=True)
1538
class StringTest(fixtures.TestBase):
1301
__dialect__ = 'default'
1540
1303
@testing.requires.unbounded_varchar
1541
def test_nolength_string(self):
1542
metadata = MetaData(testing.db)
1543
foo = Table('foo', metadata, Column('one', String))
1548
class NumericTest(fixtures.TestBase):
1551
metadata = MetaData(testing.db)
1556
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
1557
def _do_test(self, type_, input_, output, filter_=None, check_scale=False):
1558
t = Table('t', metadata, Column('x', type_))
1560
t.insert().execute([{'x':x} for x in input_])
1562
result = set([row[0] for row in t.select().execute()])
1563
output = set(output)
1565
result = set(filter_(x) for x in result)
1566
output = set(filter_(x) for x in output)
1572
[str(x) for x in result],
1573
[str(x) for x in output],
1576
def test_numeric_as_decimal(self):
1578
Numeric(precision=8, scale=4),
1579
[15.7563, decimal.Decimal("15.7563"), None],
1580
[decimal.Decimal("15.7563"), None],
1583
def test_numeric_as_float(self):
1584
if testing.against("oracle+cx_oracle"):
1585
filter_ = lambda n:n is not None and round(n, 5) or None
1590
Numeric(precision=8, scale=4, asdecimal=False),
1591
[15.7563, decimal.Decimal("15.7563"), None],
1596
def test_float_as_decimal(self):
1598
Float(precision=8, asdecimal=True),
1599
[15.7563, decimal.Decimal("15.7563"), None],
1600
[decimal.Decimal("15.7563"), None],
1601
filter_ = lambda n:n is not None and round(n, 5) or None
1604
def test_float_as_float(self):
1607
[15.7563, decimal.Decimal("15.7563")],
1609
filter_ = lambda n:n is not None and round(n, 5) or None
1612
@testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
1613
def test_precision_decimal(self):
1615
decimal.Decimal("54.234246451650"),
1616
decimal.Decimal("0.004354"),
1617
decimal.Decimal("900.0"),
1621
Numeric(precision=18, scale=12),
1626
@testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
1627
def test_enotation_decimal(self):
1628
"""test exceedingly small decimals.
1630
Decimal reports values with E notation when the exponent
1636
decimal.Decimal('1E-2'),
1637
decimal.Decimal('1E-3'),
1638
decimal.Decimal('1E-4'),
1639
decimal.Decimal('1E-5'),
1640
decimal.Decimal('1E-6'),
1641
decimal.Decimal('1E-7'),
1642
decimal.Decimal('1E-8'),
1643
decimal.Decimal("0.01000005940696"),
1644
decimal.Decimal("0.00000005940696"),
1645
decimal.Decimal("0.00000000000696"),
1646
decimal.Decimal("0.70000000000696"),
1647
decimal.Decimal("696E-12"),
1650
Numeric(precision=18, scale=14),
1655
@testing.fails_on("sybase+pyodbc",
1656
"Don't know how do get these values through FreeTDS + Sybase")
1657
@testing.fails_on("firebird", "Precision must be from 1 to 18")
1658
def test_enotation_decimal_large(self):
1659
"""test exceedingly large decimals.
1664
decimal.Decimal('4E+8'),
1665
decimal.Decimal("5748E+15"),
1666
decimal.Decimal('1.521E+15'),
1667
decimal.Decimal('00000000000000.1E+12'),
1670
Numeric(precision=25, scale=2),
1675
@testing.fails_on('sqlite', 'TODO')
1676
@testing.fails_on("firebird", "Precision must be from 1 to 18")
1677
@testing.fails_on("sybase+pysybase", "TODO")
1678
@testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
1679
def test_many_significant_digits(self):
1681
decimal.Decimal("31943874831932418390.01"),
1682
decimal.Decimal("319438950232418390.273596"),
1683
decimal.Decimal("87673.594069654243"),
1686
Numeric(precision=38, scale=12),
1691
@testing.fails_on('oracle+cx_oracle',
1692
"this may be a bug due to the difficulty in handling "
1693
"oracle precision numerics"
1695
@testing.fails_on('postgresql+pg8000',
1696
"pg-8000 does native decimal but truncates the decimals.")
1697
def test_numeric_no_decimal(self):
1699
decimal.Decimal("1.000")
1702
Numeric(precision=5, scale=3),
1304
def test_string_plain(self):
    """A length-less String renders as bare VARCHAR."""
    typ = String()
    self.assert_compile(typ, "VARCHAR")
1307
def test_string_length(self):
    """A sized String renders its length."""
    typ = String(50)
    self.assert_compile(typ, "VARCHAR(50)")
1310
def test_string_collation(self):
    """A String with a collation renders a COLLATE clause."""
    typ = String(50, collation="FOO")
    self.assert_compile(typ, 'VARCHAR(50) COLLATE "FOO"')
1314
def test_char_plain(self):
    """A length-less CHAR renders as bare CHAR."""
    typ = CHAR()
    self.assert_compile(typ, "CHAR")
1317
def test_char_length(self):
    """A sized CHAR renders its length."""
    typ = CHAR(50)
    self.assert_compile(typ, "CHAR(50)")
1320
def test_char_collation(self):
    """A CHAR with a collation renders a COLLATE clause."""
    typ = CHAR(50, collation="FOO")
    self.assert_compile(typ, 'CHAR(50) COLLATE "FOO"')
1324
def test_text_plain(self):
    """A plain Text renders as TEXT."""
    typ = Text()
    self.assert_compile(typ, "TEXT")
1327
def test_text_length(self):
    """A sized Text renders its length."""
    typ = Text(50)
    self.assert_compile(typ, "TEXT(50)")
1330
def test_text_collation(self):
    """A Text with a collation renders a COLLATE clause."""
    typ = Text(collation="FOO")
    self.assert_compile(typ, 'TEXT COLLATE "FOO"')
1334
def test_default_compile_pg_inet(self):
    """A PG INET compiles on the default dialect via allow_dialect_select."""
    typ = dialects.postgresql.INET()
    self.assert_compile(typ, "INET", allow_dialect_select=True)
1338
def test_default_compile_pg_float(self):
    """A PG FLOAT compiles on the default dialect via allow_dialect_select."""
    typ = dialects.postgresql.FLOAT()
    self.assert_compile(typ, "FLOAT", allow_dialect_select=True)
1342
def test_default_compile_mysql_integer(self):
    """A MySQL INTEGER keeps its display width on the default dialect."""
    typ = dialects.mysql.INTEGER(display_width=5)
    self.assert_compile(typ, "INTEGER(5)", allow_dialect_select=True)
1347
def test_numeric_plain(self):
    """A bare NUMERIC renders without precision or scale."""
    typ = types.NUMERIC()
    self.assert_compile(typ, 'NUMERIC')
1350
def test_numeric_precision(self):
    """NUMERIC with a precision renders it."""
    typ = types.NUMERIC(2)
    self.assert_compile(typ, 'NUMERIC(2)')
1353
def test_numeric_scale(self):
    """NUMERIC with precision and scale renders both."""
    typ = types.NUMERIC(2, 4)
    self.assert_compile(typ, 'NUMERIC(2, 4)')
1356
def test_decimal_plain(self):
    """A bare DECIMAL renders without precision or scale."""
    typ = types.DECIMAL()
    self.assert_compile(typ, 'DECIMAL')
1359
def test_decimal_precision(self):
    """DECIMAL with a precision renders it."""
    typ = types.DECIMAL(2)
    self.assert_compile(typ, 'DECIMAL(2)')
1362
def test_decimal_scale(self):
    """DECIMAL with precision and scale renders both."""
    typ = types.DECIMAL(2, 4)
    self.assert_compile(typ, 'DECIMAL(2, 4)')
1708
1368
class NumericRawSQLTest(fixtures.TestBase):
1709
1369
"""Test what DBAPIs and dialects return without any typing