Add basic SQLAlchemy setup

parent 574cb8bb49
commit 9bf7e9d5e0

@@ -1,10 +1,12 @@
import atexit
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from app.mod_devices import setup_mqtt, tear_down_mqtt

app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
db = SQLAlchemy(app)

def on_stop():
    print('Application stopping')
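For reference, a minimal sketch (not part of this commit) of how the new `db` handle created above is typically used; the `Device` model and the `app/models.py` location are hypothetical:

# Hypothetical app/models.py — illustrates the db object defined above.
from app import db

class Device(db.Model):
    # Flask-SQLAlchemy derives __tablename__ = 'device' automatically.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True, nullable=False)

# One-time schema creation against the configured SQLite database:
#     >>> from app import db
#     >>> db.create_all()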
@@ -6,8 +6,8 @@ import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))

# Define the database - we are working with
# SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'app.db')
# DATABASE_CONNECT_OPTIONS = {}
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'app.db')
DATABASE_CONNECT_OPTIONS = {}

# Application threads. A common general assumption is
# using 2 per available processor cores - to handle
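One hedged side note: Flask-SQLAlchemy 2.x emits a deprecation warning when SQLALCHEMY_TRACK_MODIFICATIONS is left unset, so a line like the following (not part of this commit) is a common companion to the URI above:

# Optional addition to config.py (assumption: the app does not need
# Flask-SQLAlchemy's modification-tracking signals).
SQLALCHEMY_TRACK_MODIFICATIONS = False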
@@ -0,0 +1,15 @@

Flask-SQLAlchemy
----------------

Adds SQLAlchemy support to your Flask application.

Links
`````

* `documentation <http://flask-sqlalchemy.pocoo.org>`_
* `development version
  <http://github.com/mitsuhiko/flask-sqlalchemy/zipball/master#egg=Flask-SQLAlchemy-dev>`_
@@ -0,0 +1 @@
pip
@@ -0,0 +1,31 @@
Copyright (c) 2014 by Armin Ronacher.

Some rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
  notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the following
  disclaimer in the documentation and/or other materials provided
  with the distribution.

* The names of the contributors may not be used to endorse or
  promote products derived from this software without specific
  prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,43 @@
Metadata-Version: 2.0
Name: Flask-SQLAlchemy
Version: 2.3.2
Summary: Adds SQLAlchemy support to your Flask application
Home-page: http://github.com/mitsuhiko/flask-sqlalchemy
Author: Phil Howell
Author-email: phil@quae.co.uk
License: BSD
Description-Content-Type: UNKNOWN
Platform: any
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Requires-Dist: Flask (>=0.10)
Requires-Dist: SQLAlchemy (>=0.8.0)


Flask-SQLAlchemy
----------------

Adds SQLAlchemy support to your Flask application.

Links
`````

* `documentation <http://flask-sqlalchemy.pocoo.org>`_
* `development version
  <http://github.com/mitsuhiko/flask-sqlalchemy/zipball/master#egg=Flask-SQLAlchemy-dev>`_
@@ -0,0 +1,14 @@
Flask_SQLAlchemy-2.3.2.dist-info/DESCRIPTION.rst,sha256=Mp4bpckSjf082xflOARFwzWLTnUszq7JxcY0dR9vD2w,273
Flask_SQLAlchemy-2.3.2.dist-info/LICENSE.txt,sha256=2smrI3hNiP6c5TcX0fa6fqODgsdJVLC166X0kVxei9A,1457
Flask_SQLAlchemy-2.3.2.dist-info/METADATA,sha256=iDXuOIujwz5MXBrH-I4WsW7kLKsY07feI7hggFHFfEk,1384
Flask_SQLAlchemy-2.3.2.dist-info/RECORD,,
Flask_SQLAlchemy-2.3.2.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
Flask_SQLAlchemy-2.3.2.dist-info/metadata.json,sha256=VOw756wP14azHrBwNxHIfbYkK4DkEPrCaV6Kf0VO36E,1257
Flask_SQLAlchemy-2.3.2.dist-info/top_level.txt,sha256=w2K4fNNoTh4HItoFfz2FRQShSeLcvHYrzU_sZov21QU,17
flask_sqlalchemy/__init__.py,sha256=0ZyibSbbC_Q1x8Kemp_2s2-NCowd_-CRvLyE1dPfnvw,35991
flask_sqlalchemy/_compat.py,sha256=6rFcZZ3kxvyeJUC_FyB62mG1saNU8iQthzWHLDcKPVE,1057
flask_sqlalchemy/model.py,sha256=7CTvGxxKmLscwcwq9mVT5ny_w301QZvTVjSqMoMx6DI,4974
Flask_SQLAlchemy-2.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
flask_sqlalchemy/__pycache__/model.cpython-36.pyc,,
flask_sqlalchemy/__pycache__/_compat.cpython-36.pyc,,
flask_sqlalchemy/__pycache__/__init__.cpython-36.pyc,,
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.30.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@@ -0,0 +1 @@
{"classifiers": ["Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "phil@quae.co.uk", "name": "Phil Howell", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://github.com/mitsuhiko/flask-sqlalchemy"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "license": "BSD", "metadata_version": "2.0", "name": "Flask-SQLAlchemy", "platform": "any", "run_requires": [{"requires": ["Flask (>=0.10)", "SQLAlchemy (>=0.8.0)"]}], "summary": "Adds SQLAlchemy support to your Flask application", "version": "2.3.2"}
@@ -0,0 +1 @@
flask_sqlalchemy
@@ -0,0 +1 @@
pip
@@ -0,0 +1,20 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php

Copyright (c) 2005-2018 the SQLAlchemy authors and contributors <see AUTHORS file>.
SQLAlchemy is a trademark of Michael Bayer.

Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,180 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 1.2.7
Summary: Database Abstraction Library
Home-page: http://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT License
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Classifier: Operating System :: OS Independent
Provides-Extra: postgresql
Provides-Extra: postgresql_psycopg2cffi
Provides-Extra: oracle
Provides-Extra: mssql_pymssql
Provides-Extra: mysql
Provides-Extra: postgresql_pg8000
Provides-Extra: pymysql
Provides-Extra: mssql_pyodbc
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc; extra == 'mssql_pyodbc'
Provides-Extra: mysql
Requires-Dist: mysqlclient; extra == 'mysql'
Provides-Extra: oracle
Requires-Dist: cx-oracle; extra == 'oracle'
Provides-Extra: postgresql
Requires-Dist: psycopg2; extra == 'postgresql'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi; extra == 'postgresql_psycopg2cffi'
Provides-Extra: pymysql
Requires-Dist: pymysql; extra == 'pymysql'

SQLAlchemy
==========

The Python SQL Toolkit and Object Relational Mapper

Introduction
-------------

SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.

Major SQLAlchemy features include:

* An industrial strength ORM, built
  from the core on the identity map, unit of work,
  and data mapper patterns. These patterns
  allow transparent persistence of objects
  using a declarative configuration system.
  Domain models
  can be constructed and manipulated naturally,
  and changes are synchronized with the
  current transaction automatically.
* A relationally-oriented query system, exposing
  the full range of SQL's capabilities
  explicitly, including joins, subqueries,
  correlation, and most everything else,
  in terms of the object model.
  Writing queries with the ORM uses the same
  techniques of relational composition you use
  when writing SQL. While you can drop into
  literal SQL at any time, it's virtually never
  needed.
* A comprehensive and flexible system
  of eager loading for related collections and objects.
  Collections are cached within a session,
  and can be loaded on individual access, all
  at once using joins, or by query per collection
  across the full result set.
* A Core SQL construction system and DBAPI
  interaction layer. The SQLAlchemy Core is
  separate from the ORM and is a full database
  abstraction layer in its own right, and includes
  an extensible Python-based SQL expression
  language, schema metadata, connection pooling,
  type coercion, and custom types.
* All primary and foreign key constraints are
  assumed to be composite and natural. Surrogate
  integer primary keys are of course still the
  norm, but SQLAlchemy never assumes or hardcodes
  to this model.
* Database introspection and generation. Database
  schemas can be "reflected" in one step into
  Python structures representing database metadata;
  those same structures can then generate
  CREATE statements right back out - all within
  the Core, independent of the ORM.

SQLAlchemy's philosophy:

* SQL databases behave less and less like object
  collections the more size and performance start to
  matter; object collections behave less and less like
  tables and rows the more abstraction starts to matter.
  SQLAlchemy aims to accommodate both of these
  principles.
* An ORM doesn't need to hide the "R". A relational
  database provides rich, set-based functionality
  that should be fully exposed. SQLAlchemy's
  ORM provides an open-ended set of patterns
  that allow a developer to construct a custom
  mediation layer between a domain model and
  a relational schema, turning the so-called
  "object relational impedance" issue into
  a distant memory.
* The developer, in all cases, makes all decisions
  regarding the design, structure, and naming conventions
  of both the object model as well as the relational
  schema. SQLAlchemy only provides the means
  to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
  "the ORM generated a bad query" - you
  retain full control over the structure of
  queries, including how joins are organized,
  how subqueries and correlation is used, what
  columns are requested. Everything SQLAlchemy
  does is ultimately the result of a developer-
  initiated decision.
* Don't use an ORM if the problem doesn't need one.
  SQLAlchemy consists of a Core and separate ORM
  component. The Core offers a full SQL expression
  language that allows Pythonic construction
  of SQL constructs that render directly to SQL
  strings for a target database, returning
  result sets that are essentially enhanced DBAPI
  cursors.
* Transactions should be the norm. With SQLAlchemy's
  ORM, nothing goes to permanent storage until
  commit() is called. SQLAlchemy encourages applications
  to create a consistent means of delineating
  the start and end of a series of operations.
* Never render a literal value in a SQL statement.
  Bound parameters are used to the greatest degree
  possible, allowing query optimizers to cache
  query plans effectively and making SQL injection
  attacks a non-issue.

Documentation
-------------

Latest documentation is at:

http://www.sqlalchemy.org/docs/

Installation / Requirements
---------------------------

Full documentation for installation is at
`Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_.

Getting Help / Development / Bug reporting
------------------------------------------

Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_.

License
-------

SQLAlchemy is distributed under the `MIT license
<http://www.opensource.org/licenses/mit-license.php>`_.
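The "never render a literal value" point above is easy to see from the Core alone; a small sketch, using only standard SQLAlchemy 1.2 API (not part of this diff):

from sqlalchemy import table, column, select

users = table('users', column('name'))
stmt = select([users]).where(users.c.name == 'alice')

# The literal 'alice' is not rendered into the SQL string; it becomes the
# bound parameter :name_1, supplied separately at execution time:
print(stmt)
# SELECT users.name
# FROM users
# WHERE users.name = :name_1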
@@ -0,0 +1,389 @@
SQLAlchemy-1.2.7.dist-info/LICENSE.txt,sha256=c72j19bFaPSUwdElIKl9QaOLfOkdiMIfG0abdHsX9_0,1229
SQLAlchemy-1.2.7.dist-info/METADATA,sha256=A_yJ38mMXt0HPCCQmSygZzDIuJLFTla5MOdvA7aIfQ4,6563
SQLAlchemy-1.2.7.dist-info/RECORD,,
SQLAlchemy-1.2.7.dist-info/WHEEL,sha256=mvUKYmGtxB2TxBT6pBp7PWP3fRlxIMuhhGt_n6cLCfM,111
SQLAlchemy-1.2.7.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=1rcr37W21337j8yeRlDiajEfktXSrl0bN3IhfdNC9_I,2248
sqlalchemy/cprocessors.cpython-36m-darwin.so,sha256=B-fmwKbWl5VSardjbjK20R_ufnrcL5lERFiG5fxg3nI,16892
sqlalchemy/cresultproxy.cpython-36m-darwin.so,sha256=B4yQy-ZsSH9WYcsMPpWlZ-bHCyMrcOVQMlfIWWhLcDQ,18844
sqlalchemy/cutils.cpython-36m-darwin.so,sha256=8qsLljxJ52x8McGz0Jw3twwzx3wDAThDgKAfLxGFOTw,9756
sqlalchemy/events.py,sha256=oy18Pqf1Sw0VV-l8tVPeEVn-EgLOSwOK9m68Dep4Yn8,49746
sqlalchemy/exc.py,sha256=B8e-WMzl6nLJH1Zf29nSgiAPHdFQncJfnEVunytV6hI,14432
sqlalchemy/inspection.py,sha256=OcKvEukttgf8L8hpp-sNS-6fJSvrClRUQAD_O3NWfXQ,3093
sqlalchemy/interfaces.py,sha256=51RGQymBHqZ4Saj2PZEZ5xk-AGZ72AYKmoTTEBsTEPc,10967
sqlalchemy/log.py,sha256=c9RRbdYa0iOw_6sgdISr0NcM6xJPe6BT2lrYtnoSxGA,6712
sqlalchemy/pool.py,sha256=24YZGCJrHbMMdDlE-HLQg1u4IFz6xLuwPu0Kc7vHm1c,52412
sqlalchemy/processors.py,sha256=xxZZ1TrStefh2oh0MiWTZQNuNJAi15IxpRs_zkFggXY,5104
sqlalchemy/schema.py,sha256=S8tx3M3c-cAPZv9ADlzY2eE_cZ7NurHH8cWUHYHcSkw,1289
sqlalchemy/types.py,sha256=PoH2ZBkg-fqdntFKFBAAQMSoyAVxWtI_kKvqeZADMDc,1713
sqlalchemy/connectors/__init__.py,sha256=rrgdmDtnZrYz2KspapOs6wEkbRh0nnLSGAGZ1_9w2_o,278
sqlalchemy/connectors/mxodbc.py,sha256=1JQJ9A9oBDamnA8hGjxTd0duDfa-KZLd-dYuBzpMcVc,5348
sqlalchemy/connectors/pyodbc.py,sha256=c0Q60nv30NR7GohqxnabESUAO4CqXfyhzBUCG6HdR78,5648
sqlalchemy/connectors/zxJDBC.py,sha256=KYrmLZt8Y5Qr49sja2ALf8IPnExjk4zsGrQqNJqgtxA,1868
sqlalchemy/databases/__init__.py,sha256=ZOSHd9ibbeNSthK3xVTpl5lRJbA0fR1IUVskoytyrHE,817
sqlalchemy/dialects/__init__.py,sha256=rrWo1qtl19-67MV9hxlbt5XYZJjC5BlTwdKGxd0PUUc,1346
sqlalchemy/dialects/firebird/__init__.py,sha256=XfFW_BjZuiB2k0Mhc2PnWndfPu5viJMkC6QZ68sKHUQ,627
sqlalchemy/dialects/firebird/base.py,sha256=J1uFdDD2FF84qFspip6bvl3gJm7xO4GW6YUiyfz96z8,28297
sqlalchemy/dialects/firebird/fdb.py,sha256=OOEVOxB8o5tJvY-fyGkStlPXjIs6a877vmTBZuWAKko,4325
sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=Fj-KyDyXQm-scFDOwpePSweYOBn-EcnPJdEwLA4rxMk,6300
sqlalchemy/dialects/mssql/__init__.py,sha256=rqhEBPQR7hHAvfQcNtQqlNspzwHP0n_lz5jzdx6ZIKk,1074
sqlalchemy/dialects/mssql/adodbapi.py,sha256=pd-vPua2A8uZ1vulBKZplABa40mMgvhaObJxKk-RvAk,2692
sqlalchemy/dialects/mssql/base.py,sha256=mdrwM9sx_eUi06qrFRb2YTpLKZsG82z4kQ5hj0utP7c,77872
sqlalchemy/dialects/mssql/information_schema.py,sha256=FXS7NQtJ_9UHlIiz-3SF5eOTBzsxb0erkB4feB0s15E,6481
sqlalchemy/dialects/mssql/mxodbc.py,sha256=U0Tu183J9d3YVJ18h-Ro6dUo7uPZdBAPrtDit0PDSkc,4604
sqlalchemy/dialects/mssql/pymssql.py,sha256=BMapRUlKdwKlyqqOHu5DnZI1_L1IK8N5VCDMdWmlH-U,3852
sqlalchemy/dialects/mssql/pyodbc.py,sha256=V074z3m5ELQf0Cp4G6wBH1Lzm7He8us53Z20OHGPKfM,10717
sqlalchemy/dialects/mssql/zxjdbc.py,sha256=vGfY3F80odCna8_uVjguLPO0VXAyDwDobBQfJsByw7M,2282
sqlalchemy/dialects/mysql/__init__.py,sha256=ak3nuGjushrYFvbM4f4xldmqPw_zaBe9vzmBlMsxHb0,1221
sqlalchemy/dialects/mysql/base.py,sha256=aTp0Bfawv18EpkjtbJZlViHGwxeTqK8ocC7HUA4snU4,86787
sqlalchemy/dialects/mysql/cymysql.py,sha256=T7eHTy4KqN8A4xpDktp2JSZjpVCMd1zB3TFZOoFV8Z4,1998
sqlalchemy/dialects/mysql/dml.py,sha256=VyUXudSFgMfpivUFH_T049WH1Xo1CLXKLIjG1HyQMXw,2680
sqlalchemy/dialects/mysql/enumerated.py,sha256=V9c9_71luaSUEoctXNAPLI8je-J18G1xaTv31SSv17g,11363
sqlalchemy/dialects/mysql/gaerdbms.py,sha256=a94buqzpqDBhePvBANjCe5fOmRnQU8-PKK5B8qfLuK4,3387
sqlalchemy/dialects/mysql/json.py,sha256=fpm8X6SBhX3vhBxBG1ia33Y33AmKrBkAUkuyvk66B0w,2071
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=eLKXT7bJf2RIjpI4oqUVKH0ExgsLdRuIXyPNV8IY2SA,6406
sqlalchemy/dialects/mysql/mysqldb.py,sha256=2zD9kzFfXqa5Y4gmTB2VLNp_jnQGKoSWBvlLZp78wqc,6947
sqlalchemy/dialects/mysql/oursql.py,sha256=m9F7sFDBFpRY0AqpDq6sBOl6d-CU4n3ZYOGyb0vvLdY,7776
sqlalchemy/dialects/mysql/pymysql.py,sha256=zVSQXSgRRk8i2CSQqwrTdbVjWC88V8IlXG9GB1VUr4A,1989
sqlalchemy/dialects/mysql/pyodbc.py,sha256=MHywwWXyOZ94Gl8fqSj_L8hNqfIOApFXhHZ6X1hZhxc,2657
sqlalchemy/dialects/mysql/reflection.py,sha256=X38QEhDSmbKMG9diXHreXr-GlNLatR1W6J5Bymrxrao,16703
sqlalchemy/dialects/mysql/types.py,sha256=yboSkXVM4Hfse8ZqaAHht1571XgQ-RHrXgj22pSTe7E,25137
sqlalchemy/dialects/mysql/zxjdbc.py,sha256=-arAeE65pqUuMzBHLfzfMpBnTlNVtD2tkhZInomSP60,3937
sqlalchemy/dialects/oracle/__init__.py,sha256=0aHajiUEgaE6jARlaSdsuurEXmhGTkeLX9EO99IDVMI,811
sqlalchemy/dialects/oracle/base.py,sha256=1jyzM74yPlav6IPzfMrL_03NtoteqAvPi_mjfnEDOiU,65203
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=we4jy1SeOs3eNz5MivE0nq_7WOqOgu31n-OSu9p9FZ8,33135
sqlalchemy/dialects/oracle/zxjdbc.py,sha256=cmDtRIa-cYqNLkzyZBlfyeigaEQFe7ffRWxxqRHyUBQ,8112
sqlalchemy/dialects/postgresql/__init__.py,sha256=e74OBltKsvshxdDxacC-1JUDmhMa0qyw-nZ68TF341M,1529
sqlalchemy/dialects/postgresql/array.py,sha256=hMO962lq4eZWNoXnKnq6tLX27b66-pZtmx7ON3hzj50,9952
sqlalchemy/dialects/postgresql/base.py,sha256=ZqZYAiMsLrkgucxokYIj98Hl33HpDoyq6sfDVVW6d7U,113128
sqlalchemy/dialects/postgresql/dml.py,sha256=1vcMv0EfFXmW_NeMjNHMhPOS7p6GyhhBWLQy531k8_o,7571
sqlalchemy/dialects/postgresql/ext.py,sha256=ibuurm5e2ZVhpV4njwcbSyVJ1xJQtROXMA71spNiKwE,6860
sqlalchemy/dialects/postgresql/hstore.py,sha256=qmcAGQsdqlMHPcGgSzqNr5ZIufskKb-Q9cqMXWbfhyE,12411
sqlalchemy/dialects/postgresql/json.py,sha256=7KHjtiPDYlyrpnrIGo5JWpg8uC8xh_0BHzz0gvNOFso,9821
sqlalchemy/dialects/postgresql/pg8000.py,sha256=0Yobx92-frW-BuVLM-lOVduhBsbhbX7inMWdZaQrz3w,9220
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=ythf8lkG7CVncRlsGNVYrw8XOlOpfHYw1S6z0c2aLik,27187
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=Ako0qThKTsuXiZB4t2ZWGysT1qrcfSJryalE2Ik5syw,1651
sqlalchemy/dialects/postgresql/pygresql.py,sha256=JV589Bg3z_7bKieAP4lIqWzJpbR_O9DOmpCmH012c8Y,7611
sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=T-gI_Rz6CV0c1-RgWm8NliN9iZWoK2KNGIidcg_5KlM,2655
sqlalchemy/dialects/postgresql/ranges.py,sha256=DJCITJtwFAJ74a3sYFizz69wm3FhfHPkzLDntGqhphw,4971
sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=KE-mXnKcl68PIvF11aBpiFiolCbH16n_LJMDlCBuWJQ,1397
sqlalchemy/dialects/sqlite/__init__.py,sha256=cvuTxTCYYsC_GHaOCCVc_dpaymxy0lpfFv1c89QDQ48,720
sqlalchemy/dialects/sqlite/base.py,sha256=ZVr368SQRntts4Y745jpZiQkv2Fda11Z5rcleNLbybI,59038
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=MLj6njhSj5RwQCs5PXQRtTO-jUCfCGfjLyQfp52tFus,4650
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=aAhdzjnyJrVrOLU_W2QB3YXY6qcOpbOqUpPZVx7GTMQ,14975
sqlalchemy/dialects/sybase/__init__.py,sha256=gh1f630fgd5Wow5tfr5HPWxKBMqFn6YSZYAQ2EZX2yA,873
sqlalchemy/dialects/sybase/base.py,sha256=F-izMkFqZJ7cdS7P0sX_GuRp91ZqbWLrbwO5VogmPfE,29456
sqlalchemy/dialects/sybase/mxodbc.py,sha256=EHt1U0g449XNX8tN0oTxIkVterSipz9HErniuo7upZs,901
sqlalchemy/dialects/sybase/pyodbc.py,sha256=bOkWunYEOfA7lLL59S_yxJ11E6QkFuBN5YjnUlKJkXQ,2102
sqlalchemy/dialects/sybase/pysybase.py,sha256=UNcGk5DQ4T05AeozHlm68JTIQDcha5pvAzGj9VtKWlY,3208
sqlalchemy/engine/__init__.py,sha256=mHlCzrEq22sQXy2jgPwCro-KIqh-6e7v3_BvDG6XVck,20438
sqlalchemy/engine/base.py,sha256=WpQwUwXvdkR-OJIhE6h4hBmTWJEhEcnw710XswWHuno,83270
sqlalchemy/engine/default.py,sha256=NPhaoSpinqFfYqt1SoJ8JCZB-fto_P7WRzb3Epixl4k,48863
sqlalchemy/engine/interfaces.py,sha256=sX6XTe1YDGAU8HIt9XRlylbNAR58EiONEwGCdcmZX_A,41554
sqlalchemy/engine/reflection.py,sha256=Ft7eFqEK8phKXcGPwJqMevW7T96DheGNANpwMNTJ7os,31894
sqlalchemy/engine/result.py,sha256=kDPnsNcwj1w5XGE_tGZM75g0hamsiWrNvLBWTzTbn0c,51717
sqlalchemy/engine/strategies.py,sha256=Sozz1zXVkyjW6I4jsgYVcsJp8KYgN4YkPqvsb3sUCZQ,9552
sqlalchemy/engine/threadlocal.py,sha256=_zaDX4IcY7wcdQpoSIZYR6LujR5VrbmkpiPybW2dg5s,4186
sqlalchemy/engine/url.py,sha256=1s60hXA1oen8E1VeUUOKWvVVQldAvwxJpOMHDUiisl4,9181
sqlalchemy/engine/util.py,sha256=iP2jhZWMEFi6eBR5hXyp2KDd4XqJKrGE3tA_QeCwU_w,2338
sqlalchemy/event/__init__.py,sha256=MTOLXdVCMskX_yRE5dZwWHNkMLkv-y9caaM9XFwlR8E,419
sqlalchemy/event/api.py,sha256=U8n3cmqB9DaO6UYud2RHMNXsEoELmKzhQRXPEq1q-qQ,5990
sqlalchemy/event/attr.py,sha256=Bpwt2BDboQy1oAtjJ2T2LsXwjALXPf3nKZmEnqx_RHg,12905
sqlalchemy/event/base.py,sha256=3DoHv2ecDAtNN3b-F2o2yCVp4uWcAiv_OcEE-Lvwov8,9540
sqlalchemy/event/legacy.py,sha256=hsxGTAa3aKkCgByaiBF_MddZxY0ORp2CYEfe5kQy93U,5814
sqlalchemy/event/registry.py,sha256=L3DdvjGeESvn_Pmj7Fkm2wbMOrHUkdhAkkJeR7ij7cg,7786
sqlalchemy/ext/__init__.py,sha256=dZJYsRwHsMYpTkUyMDACt0CVVxqe-EvP0bTCgBH0gKE,322
sqlalchemy/ext/associationproxy.py,sha256=ifkmGlpwVAkW9VcgBQ1yCVSaz-P9KdNfGnX-DeB1FUo,36302
sqlalchemy/ext/automap.py,sha256=ojCBWf4v37SatG1s1VxGS1s2EZBq13H8chMTEMyyrzE,42099
sqlalchemy/ext/baked.py,sha256=mw8qoNcyA1lbM4ffV57FsJS9NGPhodKqU7mGIva9cBo,18171
sqlalchemy/ext/compiler.py,sha256=vmIz7I8mOpihaZDhAcNpeky48IIeB8QMTgxxPMFtmJ4,16845
sqlalchemy/ext/horizontal_shard.py,sha256=zT5JyUrSlhcN5pdbmLFtLeelB-MEj2WJ1opNZf8HB00,7364
sqlalchemy/ext/hybrid.py,sha256=V-ktQSoLFcMgD0D0Mgbsl9QeHcUHMA78fJgpdmO44w4,39164
sqlalchemy/ext/indexable.py,sha256=SDNEPbRxtIR535iy8-7H976WbNQJSnJ_4oaXnyGJbko,11128
sqlalchemy/ext/instrumentation.py,sha256=HRgNiuYJ90_uSKC1iDwsEl8_KXscMQkEb9KeElk-yLE,14856
sqlalchemy/ext/mutable.py,sha256=2qgpHBVm1b6zy3IKGofM3-8PwUkCIUr8vwGpDN_nFMY,32531
sqlalchemy/ext/orderinglist.py,sha256=-pUrJOsZkjOt7F0G_6y38h8rq0cYltvFMHjv4WHlRaI,13816
sqlalchemy/ext/serializer.py,sha256=2M_aXqRksCYNd_Sef-l4AZ8hRFKOMYJD7Y5xyCqO_E4,5608
sqlalchemy/ext/declarative/__init__.py,sha256=8dR4W5dZ6DztbqaOp5g0OS8aqWIwTGGDA4uVyJ-BHoc,756
sqlalchemy/ext/declarative/api.py,sha256=4s67E7i0YQm9f5ucL2bEvBJnRcNjS0XYRo_EILa0srA,25066
sqlalchemy/ext/declarative/base.py,sha256=-lu3BtZUtcL9vGZ3-c_lF_10eJ1ylIX1l5I3bE0Xrwk,27481
sqlalchemy/ext/declarative/clsregistry.py,sha256=CwCu667YYxnXPWfsxU_w3cLou4BODKHh2BjVvozkPAw,10817
sqlalchemy/orm/__init__.py,sha256=Um0BgXXSPuVe2D-PV0ioQopLAVm0LSbnEzQZB0g-LAI,8763
sqlalchemy/orm/attributes.py,sha256=qb2xCad09bDfFDDk_SIEhDUoNodDrHAMg5xFnCsypW4,60528
sqlalchemy/orm/base.py,sha256=x0BQD-UoaSySthekaCWzs8yT9IJeRyO_AgimQbBlkpg,14668
sqlalchemy/orm/collections.py,sha256=fH5nFWzoTeeCYeuaaqTsibY_3IU7WpwYdfjLNMdVSsw,52449
sqlalchemy/orm/dependency.py,sha256=hXpJsTfC4eKZyjPm6AOXOQaMb4geIwn-IGpBaXBNfME,45802
sqlalchemy/orm/deprecated_interfaces.py,sha256=5Vzc_YJVzjJDTz3jDIj6S3Uh59NAcFfmCV5AYVUlY_s,18254
sqlalchemy/orm/descriptor_props.py,sha256=Enhwqjqmt8EiyCgxKcOeDUauonT4yQWg5ncYF4RTkQw,27751
sqlalchemy/orm/dynamic.py,sha256=Wo3ogq54-zFOnkl0kOyeV91e3McjF84ydur0t71rJEk,13171
sqlalchemy/orm/evaluator.py,sha256=GH81gACGkSBkynmfqHgqlZYjU3wv9pXqnZ0hGnxGuFY,5411
sqlalchemy/orm/events.py,sha256=MpbZ50TZK3PHT4_fSIdoIJ2P-nDugwd_gsnqZpcintg,88324
sqlalchemy/orm/exc.py,sha256=CgXQOVleTlrPxag-cqbLWpvYZEHAP4qgoL8AV-JuRGM,5459
sqlalchemy/orm/identity.py,sha256=lC9aPcReGjm7Lj7FlFhfFQR5_Ek0jvKXiC_5yBsb-1A,10255
sqlalchemy/orm/instrumentation.py,sha256=fDfrl1KDyfQOG-y1XvqQvRtflUcU8mX0cyyWBafINtc,18819
sqlalchemy/orm/interfaces.py,sha256=4SDf6tZQjVncR2_cBO30zo4IA13eOHNpjVLQnh1NzaE,23173
sqlalchemy/orm/loading.py,sha256=tzy5MVeh7EumbdMT4g5XruZJNmatHMPsWW8modbUJP4,31150
sqlalchemy/orm/mapper.py,sha256=CZpazUb9WsSIz_i8PUNt7o05E2JUU5OGgwuyqiU_CRQ,125593
sqlalchemy/orm/path_registry.py,sha256=0KNqaTaHFchUkEGZ9ivXal2Eo2DLri-DQTgRVuB7wDA,7751
sqlalchemy/orm/persistence.py,sha256=aItA-dvAesmxJLK_SUoanhBcFvfEWAzJkUit6CjE1jc,59549
sqlalchemy/orm/properties.py,sha256=4kJybUUAshFg1M12BliUWDxhlhQIQ1_2FpripXbygJQ,10458
sqlalchemy/orm/query.py,sha256=gxvAfil-QSvwSK8vzQVPpoGCrHYLo5etImwKYLJtNnc,157918
sqlalchemy/orm/relationships.py,sha256=SlPjGqMziCgjm4Ku67U_GdQbSLFVrleitTqV-tUCP4o,118398
sqlalchemy/orm/scoping.py,sha256=DOCDDbBCQFvibuVinlJjGlp_UyJCrZ_QONo-9wOwG9U,6272
sqlalchemy/orm/session.py,sha256=5nZ_aXdy-2cwwV3IpGwRRj4rW0HobfpJFEF2tGrAqY0,123364
sqlalchemy/orm/state.py,sha256=UXxJoFtRep42l1k9L5HUMZ7vNcu8MLBYcs6KmXqH8uw,28837
sqlalchemy/orm/strategies.py,sha256=Xu6_KZq737mIYPG2ifM6F610uIK66I44lpVONg5J134,73937
sqlalchemy/orm/strategy_options.py,sha256=K1GZHtna29IjI1kS0TiJ5STfoDIqsZLbaAdn3Fn1a9Y,48321
sqlalchemy/orm/sync.py,sha256=ZkUytu_ucR9qMvj0rYUHCbx44qM2X_taMZe2BKLhhJc,5451
sqlalchemy/orm/unitofwork.py,sha256=zvHw5dJmJw2m6q5GpKxC4NJ-88_40HltUrL_zgglU5I,24586
sqlalchemy/orm/util.py,sha256=B3KRGZYgQlzdFPsgDVY6LZLaRo_FeqAvCMXvKeNQ1No,41244
sqlalchemy/sql/__init__.py,sha256=xoVOg-2Frtowwp_2SjtsncUNkN6fZ04DODnZ0eBmivA,1869
sqlalchemy/sql/annotation.py,sha256=QQmA_W2SOpCehVyTM8BfK3aZD8rABVT9G6Z2I0MTM7o,6654
sqlalchemy/sql/base.py,sha256=DquGOb23-W1vid2TxTfmMpPGNnwcW5PhflqcAuH2x7o,21012
sqlalchemy/sql/compiler.py,sha256=IqBL5dsRNqqWN9vdvLGc3eviMA2q0r80Vi2HCLhf9xs,114575
sqlalchemy/sql/crud.py,sha256=9OjyAOXZ0xTgwn3hK1b8hzwS9zj-G3hBGVaR3F3o61o,25069
sqlalchemy/sql/ddl.py,sha256=KOGv8IxBRSnCraolx4T4p-Od4SXkc0N-AvH5AsLErbM,38760
sqlalchemy/sql/default_comparator.py,sha256=XpYI3Jqn72SpigQyy1T618L7NubMjacfvFTrbKeK2VY,12032
sqlalchemy/sql/dml.py,sha256=TgQAtNKW6LgTpbo4BZxEAbHtJDcUUtRTaE5dH0ZuvPg,34451
sqlalchemy/sql/elements.py,sha256=rJF2A-uAOaH7p46fLcqbhFnXcBO82MCmhie0tBjzISE,150021
sqlalchemy/sql/expression.py,sha256=6JyyfdqdtuyJiIU3h_U8N6NrylJ-Cg5p66ahsid2Fkk,6335
sqlalchemy/sql/functions.py,sha256=YnvRENTPs5OWSYjbilTtoR42qElEHxnJ3JAUB9lER2U,26284
sqlalchemy/sql/naming.py,sha256=8wxA_omnCzrJQ4jO5DdGJYN5zAlYWaeiZvaT8gKei3M,4662
sqlalchemy/sql/operators.py,sha256=RuTdtr8VB_34CsPH1VTuDjTf0imjLOSRhk7Sc2Kv9JU,39449
sqlalchemy/sql/schema.py,sha256=YgvE_fFKqs17_iM547vrvd-w26vYLGDfKsV_iJutMoQ,159245
sqlalchemy/sql/selectable.py,sha256=ojYr2AeS4Pe02hxenCZcLkOPRuCbK8W5R1frWnRYIFQ,127948
sqlalchemy/sql/sqltypes.py,sha256=mgYdF0Buvuz97Lhppt1tFsFNBypL5R8dWxUrBPkefqM,92725
sqlalchemy/sql/type_api.py,sha256=C5RiJPgz3gKBwJbDOa_F4eY7Mkj4zB18qHRhCmu_s5c,49790
sqlalchemy/sql/util.py,sha256=iKrQYXL0aCACOZzl0yoQRXemZERErzRtT6Ca9KE7Ggw,24844
sqlalchemy/sql/visitors.py,sha256=VZNpyVTjeW6JBdLNTzdUGgzhsWfuwseq83CQaHNE7p0,10266
sqlalchemy/testing/__init__.py,sha256=lS_k5h5ApYxpAx1oGzzKh7DlMNT-ECWRlqaBjYh8y3g,1146
sqlalchemy/testing/assertions.py,sha256=3Px0lTEEiNQXnzHPMj31y6gDyamBuruGKOzgdVCB5CU,17210
sqlalchemy/testing/assertsql.py,sha256=rPuxqGDk6VeksC9H3yAzvHLB8Detx5p6yKouiOEFAuQ,13249
sqlalchemy/testing/config.py,sha256=OZ53Y2cqFONCo-gdgexRiMmUkVdLhZzu13RInI97c8k,2729
sqlalchemy/testing/engines.py,sha256=2EO6a9IRJ6LWE66Ur1JrUF3jHXkrXPtl_0ImWCaiGv0,10104
sqlalchemy/testing/entities.py,sha256=hM-4tFsUSD6aYmrNNPk40Pl-r1igkRpamQRhvxptlQE,2992
sqlalchemy/testing/exclusions.py,sha256=pHmjMhl9sPX2cR2CHBxxJGCrlCewwYZMl2X70-3pd8E,12647
sqlalchemy/testing/fixtures.py,sha256=6eywiWK8bdxdK-4I3fqxyDOHqX3Eo6xGad7wI3NLn2o,10733
sqlalchemy/testing/mock.py,sha256=FHzc3v4APchNIr_CsQmGFGFGjgYhEWEC7ldOFksicDE,630
sqlalchemy/testing/pickleable.py,sha256=dBtRFeAp26iIc2IRnSiGVS3sM5Mv8yM-d6peMAKvX08,2641
sqlalchemy/testing/profiling.py,sha256=1AZ3YOAZgyZmHSvryBU1d9G_uQ2uDog1BS1DUcuECEk,8392
sqlalchemy/testing/provision.py,sha256=PleM3ZOllo_T50ADy5rJrlifnmD4qMFOaQqI4fLgEek,12934
sqlalchemy/testing/replay_fixture.py,sha256=iAxg7XsFkKSCcJnrNPQNJfjMxOgeBAa-ShOkywWPJ4w,5429
sqlalchemy/testing/requirements.py,sha256=X5sZ9uRIZ6Koro7DVTOpm8eSmfDj_Y2nrYaI7cMbmbw,25655
sqlalchemy/testing/runner.py,sha256=-eFBB4f_Io7Hkpmx6Q-84zI_lLiiB1x_QX_9R205O4Q,1607
sqlalchemy/testing/schema.py,sha256=B8TbUi7O62nSvCDmGbwIVblWRUlA0XX_LRyN_Zz3rE0,3496
sqlalchemy/testing/util.py,sha256=Lck4smouPjDXAk24gFDJt8LfFhNfg2bz6hhNHHqYr3E,7535
sqlalchemy/testing/warnings.py,sha256=TxPoKoCaIrDiIPovvyqI31WafMwJwi8YjDOQgRV-X1A,1301
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/bootstrap.py,sha256=Iw8R-d1gqoz_NKFtPyGfdX56QPcQHny_9Lvwov65aVY,1634
sqlalchemy/testing/plugin/noseplugin.py,sha256=BS3TkwihocaxBVRexAya2KmQN18EVOEEb0Q2wi-o5Nk,2883
sqlalchemy/testing/plugin/plugin_base.py,sha256=B4kUwBaqyecAC06pjXO_pvWawbY112mjM21t-3m2Kzk,18311
sqlalchemy/testing/plugin/pytestplugin.py,sha256=Oderf9DyC-nHlrC9GRig-mzwEuFsKOsDg2TN64wDnz0,6937
sqlalchemy/testing/suite/__init__.py,sha256=wqCTrb28i5FwhQZOyXVlnz3mA94iQOUBio7lszkFq-g,471
sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838
sqlalchemy/testing/suite/test_dialect.py,sha256=iGTMfkkG9yg_RQVRVesLaeg6ITLYDpbCTJSR47Ua-V4,3468
sqlalchemy/testing/suite/test_insert.py,sha256=kpiW5l4O8KP6V1L0NEy4hdyXOqsGFX7USXZGTx2Hd84,9637
sqlalchemy/testing/suite/test_reflection.py,sha256=3XUGMmPA3GLWqxOf57qAxbztuEWGJq5EAOR6XsZay90,36555
sqlalchemy/testing/suite/test_results.py,sha256=UMT5_s0p2_ngse6-ei-UalcXao70IkNYLzm_5s0gKBo,11737
sqlalchemy/testing/suite/test_select.py,sha256=aITpc6jE4ORZj33acKbi-DzmkheSDTidRGM_JMObaVc,16001
sqlalchemy/testing/suite/test_sequence.py,sha256=jyEM4sBMA2sXoJVHTSOd8vXSVX1gl85XmEBccuu4Clg,4560
sqlalchemy/testing/suite/test_types.py,sha256=XwUjKq-mcz0G5EiKP8fpxwKaAO2UyPI21_TLs1lkIgM,26753
sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582
sqlalchemy/util/__init__.py,sha256=zW-FdejHeXtIDdknAKwXyr-OkSkAaQYZOEbBsIo5dz0,2535
sqlalchemy/util/_collections.py,sha256=4iPg7skIdOppZyMmyZwGX-WAGe2_UIojctpS-LE3Ga0,28280
sqlalchemy/util/compat.py,sha256=OktDdrOx_3go2DybBl_uqvhLDqKMZMiiMDX0zRYuHpU,6910
sqlalchemy/util/deprecations.py,sha256=VYVbKqkvRQ8jtzsKpihCCahj90PjKHi1WSQWhdOpl8w,4403
sqlalchemy/util/langhelpers.py,sha256=mjfctdRMSr86C695sWHZuqpa19vmfgkSk9G2w-gOZ9M,42624
sqlalchemy/util/queue.py,sha256=7vCGAhE4i7H_Am7-e5F6sVgTMNoXBpNGBnwN9Juj9cM,6548
sqlalchemy/util/topological.py,sha256=WG0jWfUc_lPS0Blau9GElTux7Gv2nStTKXBqQlCvmU4,2794
SQLAlchemy-1.2.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
sqlalchemy/connectors/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/zxJDBC.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/databases/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-36.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-36.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-36.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-36.pyc,,
sqlalchemy/util/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/base.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/api.cpython-36.pyc,,
sqlalchemy/__pycache__/inspection.cpython-36.pyc,,
sqlalchemy/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/__pycache__/processors.cpython-36.pyc,,
sqlalchemy/__pycache__/types.cpython-36.pyc,,
sqlalchemy/__pycache__/pool.cpython-36.pyc,,
sqlalchemy/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/__pycache__/log.cpython-36.pyc,,
sqlalchemy/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/__pycache__/events.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/fixtures.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/replay_fixture.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/mock.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/runner.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-36.pyc,,
sqlalchemy/dialects/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/threadlocal.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-36.pyc,,
sqlalchemy/event/__pycache__/base.cpython-36.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-36.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-36.pyc,,
sqlalchemy/event/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/event/__pycache__/api.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-36.pyc,,
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.0)
Root-Is-Purelib: false
Tag: cp36-cp36m-macosx_10_13_x86_64
@@ -0,0 +1 @@
sqlalchemy

File diff suppressed because it is too large
@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""
    flask_sqlalchemy._compat
    ~~~~~~~~~~~~~~~~~~~~~~~~

    Internal Python 2.x/3.x compatibility layer.

    :copyright: (c) 2013 by Daniel Neuhäuser
    :license: BSD, see LICENSE for more details.
"""
import sys

PY2 = sys.version_info[0] == 2


if PY2:
    def iteritems(d):
        return d.iteritems()

    def itervalues(d):
        return d.itervalues()

    xrange = xrange

    string_types = (unicode, bytes)

    def to_str(x, charset='utf8', errors='strict'):
        if x is None or isinstance(x, str):
            return x

        if isinstance(x, unicode):
            return x.encode(charset, errors)

        return str(x)

else:
    def iteritems(d):
        return iter(d.items())

    def itervalues(d):
        return iter(d.values())

    xrange = range

    string_types = (str,)

    def to_str(x, charset='utf8', errors='strict'):
        if x is None or isinstance(x, str):
            return x

        if isinstance(x, bytes):
            return x.decode(charset, errors)

        return str(x)
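A quick illustration of the shims above, as they behave on Python 3 (where the `else` branch is active):

from flask_sqlalchemy._compat import iteritems, string_types, to_str

to_str(b'caf\xc3\xa9')            # 'café' — bytes are decoded with utf8
to_str(None)                      # None — passed through unchanged
isinstance(u'x', string_types)    # True on both Python 2 and 3
list(iteritems({'a': 1}))         # [('a', 1)] — wraps the lazy dict iterator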
@@ -0,0 +1,154 @@
import re

import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.ext.declarative import DeclarativeMeta, declared_attr
from sqlalchemy.schema import _get_table_key

from ._compat import to_str


def should_set_tablename(cls):
    """Determine whether ``__tablename__`` should be automatically generated
    for a model.

    * If no class in the MRO sets a name, one should be generated.
    * If a declared attr is found, it should be used instead.
    * If a name is found, it should be used if the class is a mixin, otherwise
      one should be generated.
    * Abstract models should not have one generated.

    Later, :meth:`._BoundDeclarativeMeta.__table_cls__` will determine if the
    model looks like single or joined-table inheritance. If no primary key is
    found, the name will be unset.
    """
    if (
        cls.__dict__.get('__abstract__', False)
        or not any(isinstance(b, DeclarativeMeta) for b in cls.__mro__[1:])
    ):
        return False

    for base in cls.__mro__:
        if '__tablename__' not in base.__dict__:
            continue

        if isinstance(base.__dict__['__tablename__'], declared_attr):
            return False

        return not (
            base is cls
            or base.__dict__.get('__abstract__', False)
            or not isinstance(base, DeclarativeMeta)
        )

    return True


camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])')


def camel_to_snake_case(name):
    def _join(match):
        word = match.group()

        if len(word) > 1:
            return ('_%s_%s' % (word[:-1], word[-1])).lower()

        return '_' + word.lower()

    return camelcase_re.sub(_join, name).lstrip('_')


class NameMetaMixin(object):
    def __init__(cls, name, bases, d):
        if should_set_tablename(cls):
            cls.__tablename__ = camel_to_snake_case(cls.__name__)

        super(NameMetaMixin, cls).__init__(name, bases, d)

        # __table_cls__ has run at this point
        # if no table was created, use the parent table
        if (
            '__tablename__' not in cls.__dict__
            and '__table__' in cls.__dict__
            and cls.__dict__['__table__'] is None
        ):
            del cls.__table__

    def __table_cls__(cls, *args, **kwargs):
        """This is called by SQLAlchemy during mapper setup. It determines the
        final table object that the model will use.

        If no primary key is found, that indicates single-table inheritance,
        so no table will be created and ``__tablename__`` will be unset.
        """
        # check if a table with this name already exists
        # allows reflected tables to be applied to model by name
        key = _get_table_key(args[0], kwargs.get('schema'))

        if key in cls.metadata.tables:
            return sa.Table(*args, **kwargs)

        # if a primary key or constraint is found, create a table for
        # joined-table inheritance
        for arg in args:
            if (
                (isinstance(arg, sa.Column) and arg.primary_key)
                or isinstance(arg, sa.PrimaryKeyConstraint)
            ):
                return sa.Table(*args, **kwargs)

        # if no base classes define a table, return one
        # ensures the correct error shows up when missing a primary key
        for base in cls.__mro__[1:-1]:
            if '__table__' in base.__dict__:
                break
        else:
            return sa.Table(*args, **kwargs)

        # single-table inheritance, use the parent tablename
        if '__tablename__' in cls.__dict__:
            del cls.__tablename__


class BindMetaMixin(object):
    def __init__(cls, name, bases, d):
        bind_key = (
            d.pop('__bind_key__', None)
            or getattr(cls, '__bind_key__', None)
        )

        super(BindMetaMixin, cls).__init__(name, bases, d)

        if bind_key is not None and hasattr(cls, '__table__'):
            cls.__table__.info['bind_key'] = bind_key


class DefaultMeta(NameMetaMixin, BindMetaMixin, DeclarativeMeta):
    pass


class Model(object):
    """Base class for SQLAlchemy declarative base model.

    To define models, subclass :attr:`db.Model <SQLAlchemy.Model>`, not this
    class. To customize ``db.Model``, subclass this and pass it as
    ``model_class`` to :class:`SQLAlchemy`.
    """

    #: Query class used by :attr:`query`. Defaults to
    # :class:`SQLAlchemy.Query`, which defaults to :class:`BaseQuery`.
    query_class = None

    #: Convenience property to query the database for instances of this model
    # using the current session. Equivalent to ``db.session.query(Model)``
    # unless :attr:`query_class` has been changed.
    query = None

    def __repr__(self):
        identity = inspect(self).identity
        if identity is None:
            pk = "(transient {0})".format(id(self))
        else:
            pk = ', '.join(to_str(value) for value in identity)
        return '<{0} {1}>'.format(type(self).__name__, pk)
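To make the naming machinery above concrete, the table-name derivation behaves like this (values checked against the regex in this file):

camel_to_snake_case('Device')         # 'device'
camel_to_snake_case('DeviceReading')  # 'device_reading'
camel_to_snake_case('HTTPLog')        # 'http_log' — capital runs stay together

# NameMetaMixin applies this to cls.__name__ whenever should_set_tablename()
# says a model needs a generated __tablename__.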
@@ -0,0 +1,148 @@
# sqlalchemy/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


from .sql import (
    alias, all_, and_, any_, asc, between, bindparam, case, cast, collate,
    column, delete, desc, distinct, except_, except_all, exists, extract,
    false, func, funcfilter, insert, intersect, intersect_all, join, lateral,
    literal, literal_column, modifier, not_, null, nullsfirst, nullslast,
    or_, outerjoin, outparam, over, select, subquery, table, tablesample,
    text, true, tuple_, type_coerce, union, union_all, update, within_group,
)

from .types import (
    ARRAY, BIGINT, BINARY, BLOB, BOOLEAN, BigInteger, Binary, Boolean, CHAR,
    CLOB, DATE, DATETIME, DECIMAL, Date, DateTime, Enum, FLOAT, Float, INT,
    INTEGER, Integer, Interval, JSON, LargeBinary, NCHAR, NVARCHAR, NUMERIC,
    Numeric, PickleType, REAL, SMALLINT, SmallInteger, String, TEXT, TIME,
    TIMESTAMP, Text, Time, TypeDecorator, Unicode, UnicodeText, VARBINARY,
    VARCHAR,
)

from .schema import (
    CheckConstraint, Column, ColumnDefault, Constraint, DefaultClause,
    FetchedValue, ForeignKey, ForeignKeyConstraint, Index, MetaData,
    PassiveDefault, PrimaryKeyConstraint, Sequence, Table,
    ThreadLocalMetaData, UniqueConstraint, DDL, BLANK_SCHEMA,
)

from .inspection import inspect
from .engine import create_engine, engine_from_config

__version__ = '1.2.7'


def __go(lcls):
    global __all__

    from . import events
    from . import util as _sa_util

    import inspect as _inspect

    __all__ = sorted(name for name, obj in lcls.items()
                     if not (name.startswith('_') or _inspect.ismodule(obj)))

    _sa_util.dependencies.resolve_all("sqlalchemy")

__go(locals())
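Since ``__go`` builds ``__all__`` from these imports, everything above is reachable directly from the package. A short smoke test against an in-memory SQLite database (table and column names are made up):

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, select

    engine = create_engine('sqlite://')
    meta = MetaData()
    nums = Table('nums', meta, Column('n', Integer, primary_key=True))
    meta.create_all(engine)
    engine.execute(nums.insert(), [{'n': 1}, {'n': 2}])
    print(engine.execute(select([nums.c.n])).fetchall())  # [(1,), (2,)]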
@@ -0,0 +1,10 @@
# connectors/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


class Connector(object):
    pass
@@ -0,0 +1,150 @@
# connectors/mxodbc.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
Provide a SQLALchemy connector for the eGenix mxODBC commercial
|
||||
Python adapter for ODBC. This is not a free product, but eGenix
|
||||
provides SQLAlchemy with a license for use in continuous integration
|
||||
testing.
|
||||
|
||||
This has been tested for use with mxODBC 3.1.2 on SQL Server 2005
|
||||
and 2008, using the SQL Server Native driver. However, it is
|
||||
possible for this to be used on other database platforms.
|
||||
|
||||
For more info on mxODBC, see http://www.egenix.com/
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from . import Connector
|
||||
|
||||
|
||||
class MxODBCConnector(Connector):
|
||||
driver = 'mxodbc'
|
||||
|
||||
supports_sane_multi_rowcount = False
|
||||
supports_unicode_statements = True
|
||||
supports_unicode_binds = True
|
||||
|
||||
supports_native_decimal = True
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
# this classmethod will normally be replaced by an instance
|
||||
# attribute of the same name, so this is normally only called once.
|
||||
cls._load_mx_exceptions()
|
||||
platform = sys.platform
|
||||
if platform == 'win32':
|
||||
from mx.ODBC import Windows as module
|
||||
# this can be the string "linux2", and possibly others
|
||||
elif 'linux' in platform:
|
||||
from mx.ODBC import unixODBC as module
|
||||
elif platform == 'darwin':
|
||||
from mx.ODBC import iODBC as module
|
||||
else:
|
||||
raise ImportError("Unrecognized platform for mxODBC import")
|
||||
return module
|
||||
|
||||
@classmethod
|
||||
def _load_mx_exceptions(cls):
|
||||
""" Import mxODBC exception classes into the module namespace,
|
||||
as if they had been imported normally. This is done here
|
||||
to avoid requiring all SQLAlchemy users to install mxODBC.
|
||||
"""
|
||||
global InterfaceError, ProgrammingError
|
||||
from mx.ODBC import InterfaceError
|
||||
from mx.ODBC import ProgrammingError
|
||||
|
||||
def on_connect(self):
|
||||
def connect(conn):
|
||||
conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
|
||||
conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
|
||||
conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
|
||||
conn.errorhandler = self._error_handler()
|
||||
return connect
|
||||
|
||||
def _error_handler(self):
|
||||
""" Return a handler that adjusts mxODBC's raised Warnings to
|
||||
emit Python standard warnings.
|
||||
"""
|
||||
from mx.ODBC.Error import Warning as MxOdbcWarning
|
||||
|
||||
def error_handler(connection, cursor, errorclass, errorvalue):
|
||||
if issubclass(errorclass, MxOdbcWarning):
|
||||
errorclass.__bases__ = (Warning,)
|
||||
warnings.warn(message=str(errorvalue),
|
||||
category=errorclass,
|
||||
stacklevel=2)
|
||||
else:
|
||||
raise errorclass(errorvalue)
|
||||
return error_handler
|
||||
|
||||
def create_connect_args(self, url):
|
||||
""" Return a tuple of *args,**kwargs for creating a connection.
|
||||
|
||||
The mxODBC 3.x connection constructor looks like this:
|
||||
|
||||
connect(dsn, user='', password='',
|
||||
clear_auto_commit=1, errorhandler=None)
|
||||
|
||||
This method translates the values in the provided uri
|
||||
into args and kwargs needed to instantiate an mxODBC Connection.
|
||||
|
||||
The arg 'errorhandler' is not used by SQLAlchemy and will
|
||||
not be populated.
|
||||
|
||||
"""
|
||||
opts = url.translate_connect_args(username='user')
|
||||
opts.update(url.query)
|
||||
args = opts.pop('host')
|
||||
opts.pop('port', None)
|
||||
opts.pop('database', None)
|
||||
return (args,), opts
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
# TODO: eGenix recommends checking connection.closed here
|
||||
# Does that detect dropped connections ?
|
||||
if isinstance(e, self.dbapi.ProgrammingError):
|
||||
return "connection already closed" in str(e)
|
||||
elif isinstance(e, self.dbapi.Error):
|
||||
return '[08S01]' in str(e)
|
||||
else:
|
||||
return False
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
# eGenix suggests using conn.dbms_version instead
|
||||
# of what we're doing here
|
||||
dbapi_con = connection.connection
|
||||
version = []
|
||||
r = re.compile(r'[.\-]')
|
||||
# 18 == pyodbc.SQL_DBMS_VER
|
||||
for n in r.split(dbapi_con.getinfo(18)[1]):
|
||||
try:
|
||||
version.append(int(n))
|
||||
except ValueError:
|
||||
version.append(n)
|
||||
return tuple(version)
|
||||
|
||||
def _get_direct(self, context):
|
||||
if context:
|
||||
native_odbc_execute = context.execution_options.\
|
||||
get('native_odbc_execute', 'auto')
|
||||
# default to direct=True in all cases; it is more generally
# compatible, especially with SQL Server
return False if native_odbc_execute is True else True
|
||||
else:
|
||||
return True
|
||||
|
||||
def do_executemany(self, cursor, statement, parameters, context=None):
|
||||
cursor.executemany(
|
||||
statement, parameters, direct=self._get_direct(context))
|
||||
|
||||
def do_execute(self, cursor, statement, parameters, context=None):
|
||||
cursor.execute(statement, parameters, direct=self._get_direct(context))
|
|
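In practice this connector is reached through the ``mssql+mxodbc`` dialect URL. A hypothetical sketch: the DSN name and credentials are placeholders, and mxODBC itself must be licensed and installed separately:

    from sqlalchemy import create_engine

    # create_connect_args() above turns this URL into
    # connect('mydsn', user='scott', password='tiger')
    engine = create_engine('mssql+mxodbc://scott:tiger@mydsn')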
@@ -0,0 +1,164 @@
# connectors/pyodbc.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from . import Connector
|
||||
from .. import util
|
||||
|
||||
|
||||
import re
|
||||
|
||||
|
||||
class PyODBCConnector(Connector):
|
||||
driver = 'pyodbc'
|
||||
|
||||
supports_sane_rowcount_returning = False
|
||||
supports_sane_multi_rowcount = False
|
||||
|
||||
supports_unicode_statements = True
|
||||
supports_unicode_binds = True
|
||||
|
||||
supports_native_decimal = True
|
||||
default_paramstyle = 'named'
|
||||
|
||||
# for non-DSN connections, this *may* be used to
|
||||
# hold the desired driver name
|
||||
pyodbc_driver_name = None
|
||||
|
||||
def __init__(self, supports_unicode_binds=None, **kw):
|
||||
super(PyODBCConnector, self).__init__(**kw)
|
||||
if supports_unicode_binds is not None:
|
||||
self.supports_unicode_binds = supports_unicode_binds
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
return __import__('pyodbc')
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username='user')
|
||||
opts.update(url.query)
|
||||
|
||||
keys = opts
|
||||
|
||||
query = url.query
|
||||
|
||||
connect_args = {}
|
||||
for param in ('ansi', 'unicode_results', 'autocommit'):
|
||||
if param in keys:
|
||||
connect_args[param] = util.asbool(keys.pop(param))
|
||||
|
||||
if 'odbc_connect' in keys:
|
||||
connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
|
||||
else:
|
||||
def check_quote(token):
|
||||
if ";" in str(token):
|
||||
token = "'%s'" % token
|
||||
return token
|
||||
|
||||
keys = dict(
|
||||
(k, check_quote(v)) for k, v in keys.items()
|
||||
)
|
||||
|
||||
dsn_connection = 'dsn' in keys or \
|
||||
('host' in keys and 'database' not in keys)
|
||||
if dsn_connection:
|
||||
connectors = ['dsn=%s' % (keys.pop('host', '') or
|
||||
keys.pop('dsn', ''))]
|
||||
else:
|
||||
port = ''
|
||||
if 'port' in keys and 'port' not in query:
|
||||
port = ',%d' % int(keys.pop('port'))
|
||||
|
||||
connectors = []
|
||||
driver = keys.pop('driver', self.pyodbc_driver_name)
|
||||
if driver is None:
|
||||
util.warn(
|
||||
"No driver name specified; "
|
||||
"this is expected by PyODBC when using "
|
||||
"DSN-less connections")
|
||||
else:
|
||||
connectors.append("DRIVER={%s}" % driver)
|
||||
|
||||
connectors.extend(
|
||||
[
|
||||
'Server=%s%s' % (keys.pop('host', ''), port),
|
||||
'Database=%s' % keys.pop('database', '')
|
||||
])
|
||||
|
||||
user = keys.pop("user", None)
|
||||
if user:
|
||||
connectors.append("UID=%s" % user)
|
||||
connectors.append("PWD=%s" % keys.pop('password', ''))
|
||||
else:
|
||||
connectors.append("Trusted_Connection=Yes")
|
||||
|
||||
# if set to 'Yes', the ODBC layer will try to automagically
|
||||
# convert textual data from your database encoding to your
|
||||
# client encoding. This should obviously be set to 'No' if
|
||||
# you query a cp1253 encoded database from a latin1 client...
|
||||
if 'odbc_autotranslate' in keys:
|
||||
connectors.append("AutoTranslate=%s" %
|
||||
keys.pop("odbc_autotranslate"))
|
||||
|
||||
connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
|
||||
|
||||
return [[";".join(connectors)], connect_args]
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if isinstance(e, self.dbapi.ProgrammingError):
|
||||
return "The cursor's connection has been closed." in str(e) or \
|
||||
'Attempt to use a closed connection.' in str(e)
|
||||
else:
|
||||
return False
|
||||
|
||||
# def initialize(self, connection):
|
||||
# super(PyODBCConnector, self).initialize(connection)
|
||||
|
||||
def _dbapi_version(self):
|
||||
if not self.dbapi:
|
||||
return ()
|
||||
return self._parse_dbapi_version(self.dbapi.version)
|
||||
|
||||
def _parse_dbapi_version(self, vers):
|
||||
m = re.match(
|
||||
r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
|
||||
vers
|
||||
)
|
||||
if not m:
|
||||
return ()
|
||||
vers = tuple([int(x) for x in m.group(1).split(".")])
|
||||
if m.group(2):
|
||||
vers += (m.group(2),)
|
||||
return vers
|
||||
|
||||
def _get_server_version_info(self, connection, allow_chars=True):
|
||||
# NOTE: this function is not reliable, particularly when
|
||||
# freetds is in use. Implement database-specific server version
|
||||
# queries.
|
||||
dbapi_con = connection.connection
|
||||
version = []
|
||||
r = re.compile(r'[.\-]')
|
||||
for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)):
|
||||
try:
|
||||
version.append(int(n))
|
||||
except ValueError:
|
||||
if allow_chars:
|
||||
version.append(n)
|
||||
return tuple(version)
|
||||
|
||||
def set_isolation_level(self, connection, level):
|
||||
# adjust for ConnectionFairy being present
|
||||
# allows attribute set e.g. "connection.autocommit = True"
|
||||
# to work properly
|
||||
if hasattr(connection, 'connection'):
|
||||
connection = connection.connection
|
||||
|
||||
if level == 'AUTOCOMMIT':
|
||||
connection.autocommit = True
|
||||
else:
|
||||
connection.autocommit = False
|
||||
super(PyODBCConnector, self).set_isolation_level(connection,
|
||||
level)
|
|
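The branching in ``create_connect_args`` above corresponds to three URL styles. A sketch in which hosts, driver names and credentials are placeholders:

    from sqlalchemy import create_engine
    try:
        from urllib.parse import quote_plus  # Python 3
    except ImportError:
        from urllib import quote_plus  # Python 2

    # 1. DSN form: a "host" with no database becomes dsn=...;UID=...;PWD=...
    create_engine('mssql+pyodbc://scott:tiger@my_dsn')

    # 2. hostname form: DRIVER, Server and Database keywords are assembled;
    #    the driver name must match one installed on the client machine
    create_engine('mssql+pyodbc://scott:tiger@dbhost:1433/mydb'
                  '?driver=ODBC+Driver+13+for+SQL+Server')

    # 3. raw form: a pre-built ODBC string passed through via odbc_connect
    params = quote_plus('DRIVER={ODBC Driver 13 for SQL Server};'
                        'SERVER=dbhost;DATABASE=mydb;UID=scott;PWD=tiger')
    create_engine('mssql+pyodbc:///?odbc_connect=' + params)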
@@ -0,0 +1,60 @@
# connectors/zxJDBC.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import sys
|
||||
from . import Connector
|
||||
|
||||
|
||||
class ZxJDBCConnector(Connector):
|
||||
driver = 'zxjdbc'
|
||||
|
||||
supports_sane_rowcount = False
|
||||
supports_sane_multi_rowcount = False
|
||||
|
||||
supports_unicode_binds = True
|
||||
supports_unicode_statements = sys.version > '2.5.0+'
|
||||
description_encoding = None
|
||||
default_paramstyle = 'qmark'
|
||||
|
||||
jdbc_db_name = None
|
||||
jdbc_driver_name = None
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
from com.ziclix.python.sql import zxJDBC
|
||||
return zxJDBC
|
||||
|
||||
def _driver_kwargs(self):
|
||||
"""Return kw arg dict to be sent to connect()."""
|
||||
return {}
|
||||
|
||||
def _create_jdbc_url(self, url):
|
||||
"""Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
|
||||
return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
|
||||
url.port is not None
|
||||
and ':%s' % url.port or '',
|
||||
url.database)
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = self._driver_kwargs()
|
||||
opts.update(url.query)
|
||||
return [
|
||||
[self._create_jdbc_url(url),
|
||||
url.username, url.password,
|
||||
self.jdbc_driver_name],
|
||||
opts]
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if not isinstance(e, self.dbapi.ProgrammingError):
|
||||
return False
|
||||
e = str(e)
|
||||
return 'connection is closed' in e or 'cursor is closed' in e
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
# use connection.connection.dbversion, and parse appropriately
|
||||
# to get a tuple
|
||||
raise NotImplementedError()
|
|
@@ -0,0 +1,30 @@
# databases/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Include imports from the sqlalchemy.dialects package for backwards
compatibility with pre 0.6 versions.

"""
from ..dialects.sqlite import base as sqlite
from ..dialects.postgresql import base as postgresql
postgres = postgresql
from ..dialects.mysql import base as mysql
from ..dialects.oracle import base as oracle
from ..dialects.firebird import base as firebird
from ..dialects.mssql import base as mssql
from ..dialects.sybase import base as sybase


__all__ = (
    'firebird',
    'mssql',
    'mysql',
    'postgresql',
    'sqlite',
    'oracle',
    'sybase',
)
@@ -0,0 +1,56 @@
# dialects/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

__all__ = (
    'firebird',
    'mssql',
    'mysql',
    'oracle',
    'postgresql',
    'sqlite',
    'sybase',
)

from .. import util

_translates = {'postgres': 'postgresql'}


def _auto_fn(name):
    """default dialect importer.

    plugs into the :class:`.PluginLoader`
    as a first-hit system.

    """
    if "." in name:
        dialect, driver = name.split(".")
    else:
        dialect = name
        driver = "base"

    if dialect in _translates:
        translated = _translates[dialect]
        util.warn_deprecated(
            "The '%s' dialect name has been "
            "renamed to '%s'" % (dialect, translated)
        )
        dialect = translated
    try:
        module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
    except ImportError:
        return None

    module = getattr(module, dialect)
    if hasattr(module, driver):
        module = getattr(module, driver)
        return lambda: module.dialect
    else:
        return None

registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)

plugins = util.PluginLoader("sqlalchemy.plugins")
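``registry`` is also the public hook by which third-party dialects plug in. A sketch where the URL scheme, package and class names are all hypothetical:

    from sqlalchemy.dialects import registry
    from sqlalchemy import create_engine

    # after registration, 'acmedb://' URLs resolve to acme_sa.dialect:AcmeDialect
    registry.register('acmedb', 'acme_sa.dialect', 'AcmeDialect')
    engine = create_engine('acmedb://scott:tiger@localhost/test')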
@@ -0,0 +1,20 @@
# firebird/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, kinterbasdb, fdb  # noqa

from sqlalchemy.dialects.firebird.base import \
    SMALLINT, BIGINT, FLOAT, DATE, TIME, \
    TEXT, NUMERIC, TIMESTAMP, VARCHAR, CHAR, BLOB

base.dialect = dialect = fdb.dialect

__all__ = (
    'SMALLINT', 'BIGINT', 'FLOAT', 'DATE', 'TIME',
    'TEXT', 'NUMERIC', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
    'dialect'
)
@@ -0,0 +1,741 @@
# firebird/base.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
r"""
|
||||
|
||||
.. dialect:: firebird
|
||||
:name: Firebird
|
||||
|
||||
Firebird Dialects
|
||||
-----------------
|
||||
|
||||
Firebird offers two distinct dialects_ (not to be confused with a
|
||||
SQLAlchemy ``Dialect``):
|
||||
|
||||
dialect 1
|
||||
This is the old syntax and behaviour, inherited from Interbase pre-6.0.
|
||||
|
||||
dialect 3
|
||||
This is the newer and supported syntax, introduced in Interbase 6.0.
|
||||
|
||||
The SQLAlchemy Firebird dialect detects these versions and
|
||||
adjusts its representation of SQL accordingly. However,
|
||||
support for dialect 1 is not well tested and probably has
|
||||
incompatibilities.
|
||||
|
||||
Locking Behavior
|
||||
----------------
|
||||
|
||||
Firebird locks tables aggressively. For this reason, a DROP TABLE may
|
||||
hang until other transactions are released. SQLAlchemy does its best
|
||||
to release transactions as quickly as possible. The most common cause
|
||||
of hanging transactions is a non-fully consumed result set, i.e.::
|
||||
|
||||
result = engine.execute("select * from table")
|
||||
row = result.fetchone()
|
||||
return
|
||||
|
||||
Where above, the ``ResultProxy`` has not been fully consumed. The
|
||||
connection will be returned to the pool and the transactional state
|
||||
rolled back once the Python garbage collector reclaims the objects
|
||||
which hold onto the connection, which often occurs asynchronously.
|
||||
The above use case can be alleviated by calling ``first()`` on the
|
||||
``ResultProxy`` which will fetch the first row and immediately close
|
||||
all remaining cursor/connection resources.
|
||||
|
||||
RETURNING support
|
||||
-----------------
|
||||
|
||||
Firebird 2.0 supports returning a result set from inserts, and 2.1
|
||||
extends that to deletes and updates. This is generically exposed by
|
||||
the SQLAlchemy ``returning()`` method, such as::
|
||||
|
||||
# INSERT..RETURNING
|
||||
result = table.insert().returning(table.c.col1, table.c.col2).\
|
||||
values(name='foo')
|
||||
print result.fetchall()
|
||||
|
||||
# UPDATE..RETURNING
|
||||
raises = empl.update().returning(empl.c.id, empl.c.salary).\
|
||||
where(empl.c.sales>100).\
|
||||
values(dict(salary=empl.c.salary * 1.1))
|
||||
print raises.fetchall()
|
||||
|
||||
|
||||
.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html
|
||||
|
||||
"""
|
||||
|
||||
import datetime
|
||||
|
||||
from sqlalchemy import schema as sa_schema
|
||||
from sqlalchemy import exc, types as sqltypes, sql, util
|
||||
from sqlalchemy.sql import expression
|
||||
from sqlalchemy.engine import base, default, reflection
|
||||
from sqlalchemy.sql import compiler
|
||||
from sqlalchemy.sql.elements import quoted_name
|
||||
|
||||
from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
|
||||
SMALLINT, TEXT, TIME, TIMESTAMP, Integer)
|
||||
|
||||
|
||||
RESERVED_WORDS = set([
|
||||
"active", "add", "admin", "after", "all", "alter", "and", "any", "as",
|
||||
"asc", "ascending", "at", "auto", "avg", "before", "begin", "between",
|
||||
"bigint", "bit_length", "blob", "both", "by", "case", "cast", "char",
|
||||
"character", "character_length", "char_length", "check", "close",
|
||||
"collate", "column", "commit", "committed", "computed", "conditional",
|
||||
"connect", "constraint", "containing", "count", "create", "cross",
|
||||
"cstring", "current", "current_connection", "current_date",
|
||||
"current_role", "current_time", "current_timestamp",
|
||||
"current_transaction", "current_user", "cursor", "database", "date",
|
||||
"day", "dec", "decimal", "declare", "default", "delete", "desc",
|
||||
"descending", "disconnect", "distinct", "do", "domain", "double",
|
||||
"drop", "else", "end", "entry_point", "escape", "exception",
|
||||
"execute", "exists", "exit", "external", "extract", "fetch", "file",
|
||||
"filter", "float", "for", "foreign", "from", "full", "function",
|
||||
"gdscode", "generator", "gen_id", "global", "grant", "group",
|
||||
"having", "hour", "if", "in", "inactive", "index", "inner",
|
||||
"input_type", "insensitive", "insert", "int", "integer", "into", "is",
|
||||
"isolation", "join", "key", "leading", "left", "length", "level",
|
||||
"like", "long", "lower", "manual", "max", "maximum_segment", "merge",
|
||||
"min", "minute", "module_name", "month", "names", "national",
|
||||
"natural", "nchar", "no", "not", "null", "numeric", "octet_length",
|
||||
"of", "on", "only", "open", "option", "or", "order", "outer",
|
||||
"output_type", "overflow", "page", "pages", "page_size", "parameter",
|
||||
"password", "plan", "position", "post_event", "precision", "primary",
|
||||
"privileges", "procedure", "protected", "rdb$db_key", "read", "real",
|
||||
"record_version", "recreate", "recursive", "references", "release",
|
||||
"reserv", "reserving", "retain", "returning_values", "returns",
|
||||
"revoke", "right", "rollback", "rows", "row_count", "savepoint",
|
||||
"schema", "second", "segment", "select", "sensitive", "set", "shadow",
|
||||
"shared", "singular", "size", "smallint", "snapshot", "some", "sort",
|
||||
"sqlcode", "stability", "start", "starting", "starts", "statistics",
|
||||
"sub_type", "sum", "suspend", "table", "then", "time", "timestamp",
|
||||
"to", "trailing", "transaction", "trigger", "trim", "uncommitted",
|
||||
"union", "unique", "update", "upper", "user", "using", "value",
|
||||
"values", "varchar", "variable", "varying", "view", "wait", "when",
|
||||
"where", "while", "with", "work", "write", "year",
|
||||
])
|
||||
|
||||
|
||||
class _StringType(sqltypes.String):
|
||||
"""Base for Firebird string types."""
|
||||
|
||||
def __init__(self, charset=None, **kw):
|
||||
self.charset = charset
|
||||
super(_StringType, self).__init__(**kw)
|
||||
|
||||
|
||||
class VARCHAR(_StringType, sqltypes.VARCHAR):
|
||||
"""Firebird VARCHAR type"""
|
||||
__visit_name__ = 'VARCHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
super(VARCHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class CHAR(_StringType, sqltypes.CHAR):
|
||||
"""Firebird CHAR type"""
|
||||
__visit_name__ = 'CHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
super(CHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class _FBDateTime(sqltypes.DateTime):
|
||||
def bind_processor(self, dialect):
|
||||
def process(value):
|
||||
if type(value) == datetime.date:
|
||||
return datetime.datetime(value.year, value.month, value.day)
|
||||
else:
|
||||
return value
|
||||
return process
|
||||
|
||||
colspecs = {
|
||||
sqltypes.DateTime: _FBDateTime
|
||||
}
|
||||
|
||||
ischema_names = {
|
||||
'SHORT': SMALLINT,
|
||||
'LONG': INTEGER,
|
||||
'QUAD': FLOAT,
|
||||
'FLOAT': FLOAT,
|
||||
'DATE': DATE,
|
||||
'TIME': TIME,
|
||||
'TEXT': TEXT,
|
||||
'INT64': BIGINT,
|
||||
'DOUBLE': FLOAT,
|
||||
'TIMESTAMP': TIMESTAMP,
|
||||
'VARYING': VARCHAR,
|
||||
'CSTRING': CHAR,
|
||||
'BLOB': BLOB,
|
||||
}
|
||||
|
||||
|
||||
# TODO: date conversion types (should be implemented as _FBDateTime,
|
||||
# _FBDate, etc. as bind/result functionality is required)
|
||||
|
||||
class FBTypeCompiler(compiler.GenericTypeCompiler):
|
||||
def visit_boolean(self, type_, **kw):
|
||||
return self.visit_SMALLINT(type_, **kw)
|
||||
|
||||
def visit_datetime(self, type_, **kw):
|
||||
return self.visit_TIMESTAMP(type_, **kw)
|
||||
|
||||
def visit_TEXT(self, type_, **kw):
|
||||
return "BLOB SUB_TYPE 1"
|
||||
|
||||
def visit_BLOB(self, type_, **kw):
|
||||
return "BLOB SUB_TYPE 0"
|
||||
|
||||
def _extend_string(self, type_, basic):
|
||||
charset = getattr(type_, 'charset', None)
|
||||
if charset is None:
|
||||
return basic
|
||||
else:
|
||||
return '%s CHARACTER SET %s' % (basic, charset)
|
||||
|
||||
def visit_CHAR(self, type_, **kw):
|
||||
basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
|
||||
return self._extend_string(type_, basic)
|
||||
|
||||
def visit_VARCHAR(self, type_, **kw):
|
||||
if not type_.length:
|
||||
raise exc.CompileError(
|
||||
"VARCHAR requires a length on dialect %s" %
|
||||
self.dialect.name)
|
||||
basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
|
||||
return self._extend_string(type_, basic)
|
||||
|
||||
|
||||
class FBCompiler(sql.compiler.SQLCompiler):
|
||||
"""Firebird specific idiosyncrasies"""
|
||||
|
||||
ansi_bind_rules = True
|
||||
|
||||
# def visit_contains_op_binary(self, binary, operator, **kw):
#     can't use CONTAINING because it's case insensitive.

# def visit_notcontains_op_binary(self, binary, operator, **kw):
#     can't use NOT CONTAINING because it's case insensitive.

def visit_now_func(self, fn, **kw):
|
||||
return "CURRENT_TIMESTAMP"
|
||||
|
||||
def visit_startswith_op_binary(self, binary, operator, **kw):
|
||||
return '%s STARTING WITH %s' % (
|
||||
binary.left._compiler_dispatch(self, **kw),
|
||||
binary.right._compiler_dispatch(self, **kw))
|
||||
|
||||
def visit_notstartswith_op_binary(self, binary, operator, **kw):
|
||||
return '%s NOT STARTING WITH %s' % (
|
||||
binary.left._compiler_dispatch(self, **kw),
|
||||
binary.right._compiler_dispatch(self, **kw))
|
||||
|
||||
def visit_mod_binary(self, binary, operator, **kw):
|
||||
return "mod(%s, %s)" % (
|
||||
self.process(binary.left, **kw),
|
||||
self.process(binary.right, **kw))
|
||||
|
||||
def visit_alias(self, alias, asfrom=False, **kwargs):
|
||||
if self.dialect._version_two:
|
||||
return super(FBCompiler, self).\
|
||||
visit_alias(alias, asfrom=asfrom, **kwargs)
|
||||
else:
|
||||
# Override to not use the AS keyword which FB 1.5 does not like
|
||||
if asfrom:
|
||||
alias_name = isinstance(alias.name,
|
||||
expression._truncated_label) and \
|
||||
self._truncated_identifier("alias",
|
||||
alias.name) or alias.name
|
||||
|
||||
return self.process(
|
||||
alias.original, asfrom=asfrom, **kwargs) + \
|
||||
" " + \
|
||||
self.preparer.format_alias(alias, alias_name)
|
||||
else:
|
||||
return self.process(alias.original, **kwargs)
|
||||
|
||||
def visit_substring_func(self, func, **kw):
|
||||
s = self.process(func.clauses.clauses[0])
|
||||
start = self.process(func.clauses.clauses[1])
|
||||
if len(func.clauses.clauses) > 2:
|
||||
length = self.process(func.clauses.clauses[2])
|
||||
return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
|
||||
else:
|
||||
return "SUBSTRING(%s FROM %s)" % (s, start)
|
||||
|
||||
def visit_length_func(self, function, **kw):
|
||||
if self.dialect._version_two:
|
||||
return "char_length" + self.function_argspec(function)
|
||||
else:
|
||||
return "strlen" + self.function_argspec(function)
|
||||
|
||||
visit_char_length_func = visit_length_func
|
||||
|
||||
def function_argspec(self, func, **kw):
|
||||
# TODO: this probably will need to be
|
||||
# narrowed to a fixed list, some no-arg functions
|
||||
# may require parens - see similar example in the oracle
|
||||
# dialect
|
||||
if func.clauses is not None and len(func.clauses):
|
||||
return self.process(func.clause_expr, **kw)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def default_from(self):
|
||||
return " FROM rdb$database"
|
||||
|
||||
def visit_sequence(self, seq, **kw):
|
||||
return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
|
||||
|
||||
def get_select_precolumns(self, select, **kw):
|
||||
"""Called when building a ``SELECT`` statement, position is just
|
||||
before column list Firebird puts the limit and offset right
|
||||
after the ``SELECT``...
|
||||
"""
|
||||
|
||||
result = ""
|
||||
if select._limit_clause is not None:
|
||||
result += "FIRST %s " % self.process(select._limit_clause, **kw)
|
||||
if select._offset_clause is not None:
|
||||
result += "SKIP %s " % self.process(select._offset_clause, **kw)
|
||||
if select._distinct:
|
||||
result += "DISTINCT "
|
||||
return result
|
||||
|
||||
def limit_clause(self, select, **kw):
|
||||
"""Already taken care of in the `get_select_precolumns` method."""
|
||||
|
||||
return ""
|
||||
|
||||
def returning_clause(self, stmt, returning_cols):
|
||||
columns = [
|
||||
self._label_select_column(None, c, True, False, {})
|
||||
for c in expression._select_iterables(returning_cols)
|
||||
]
|
||||
|
||||
return 'RETURNING ' + ', '.join(columns)
|
||||
|
||||
|
||||
class FBDDLCompiler(sql.compiler.DDLCompiler):
|
||||
"""Firebird syntactic idiosyncrasies"""
|
||||
|
||||
def visit_create_sequence(self, create):
|
||||
"""Generate a ``CREATE GENERATOR`` statement for the sequence."""
|
||||
|
||||
# no syntax for these
|
||||
# http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
|
||||
if create.element.start is not None:
    raise NotImplementedError(
        "Firebird SEQUENCE doesn't support START WITH")
if create.element.increment is not None:
    raise NotImplementedError(
        "Firebird SEQUENCE doesn't support INCREMENT BY")

if self.dialect._version_two:
|
||||
return "CREATE SEQUENCE %s" % \
|
||||
self.preparer.format_sequence(create.element)
|
||||
else:
|
||||
return "CREATE GENERATOR %s" % \
|
||||
self.preparer.format_sequence(create.element)
|
||||
|
||||
def visit_drop_sequence(self, drop):
|
||||
"""Generate a ``DROP GENERATOR`` statement for the sequence."""
|
||||
|
||||
if self.dialect._version_two:
|
||||
return "DROP SEQUENCE %s" % \
|
||||
self.preparer.format_sequence(drop.element)
|
||||
else:
|
||||
return "DROP GENERATOR %s" % \
|
||||
self.preparer.format_sequence(drop.element)
|
||||
|
||||
|
||||
class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
|
||||
"""Install Firebird specific reserved words."""
|
||||
|
||||
reserved_words = RESERVED_WORDS
|
||||
illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(
|
||||
['_'])
|
||||
|
||||
def __init__(self, dialect):
|
||||
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
|
||||
|
||||
|
||||
class FBExecutionContext(default.DefaultExecutionContext):
|
||||
def fire_sequence(self, seq, type_):
|
||||
"""Get the next value from the sequence using ``gen_id()``."""
|
||||
|
||||
return self._execute_scalar(
|
||||
"SELECT gen_id(%s, 1) FROM rdb$database" %
|
||||
self.dialect.identifier_preparer.format_sequence(seq),
|
||||
type_
|
||||
)
|
||||
|
||||
|
||||
class FBDialect(default.DefaultDialect):
|
||||
"""Firebird dialect"""
|
||||
|
||||
name = 'firebird'
|
||||
|
||||
max_identifier_length = 31
|
||||
|
||||
supports_sequences = True
|
||||
sequences_optional = False
|
||||
supports_default_values = True
|
||||
postfetch_lastrowid = False
|
||||
|
||||
supports_native_boolean = False
|
||||
|
||||
requires_name_normalize = True
|
||||
supports_empty_insert = False
|
||||
|
||||
statement_compiler = FBCompiler
|
||||
ddl_compiler = FBDDLCompiler
|
||||
preparer = FBIdentifierPreparer
|
||||
type_compiler = FBTypeCompiler
|
||||
execution_ctx_cls = FBExecutionContext
|
||||
|
||||
colspecs = colspecs
|
||||
ischema_names = ischema_names
|
||||
|
||||
construct_arguments = []
|
||||
|
||||
# defaults to dialect ver. 3,
|
||||
# will be autodetected off upon
|
||||
# first connect
|
||||
_version_two = True
|
||||
|
||||
def initialize(self, connection):
|
||||
super(FBDialect, self).initialize(connection)
|
||||
self._version_two = ('firebird' in self.server_version_info and
|
||||
self.server_version_info >= (2, )
|
||||
) or \
|
||||
('interbase' in self.server_version_info and
|
||||
self.server_version_info >= (6, )
|
||||
)
|
||||
|
||||
if not self._version_two:
|
||||
# TODO: whatever other pre < 2.0 stuff goes here
|
||||
self.ischema_names = ischema_names.copy()
|
||||
self.ischema_names['TIMESTAMP'] = sqltypes.DATE
|
||||
self.colspecs = {
|
||||
sqltypes.DateTime: sqltypes.DATE
|
||||
}
|
||||
|
||||
self.implicit_returning = self._version_two and \
|
||||
self.__dict__.get('implicit_returning', True)
|
||||
|
||||
def normalize_name(self, name):
|
||||
# Remove trailing spaces: FB uses a CHAR() type,
|
||||
# that is padded with spaces
|
||||
name = name and name.rstrip()
|
||||
if name is None:
|
||||
return None
|
||||
elif name.upper() == name and \
|
||||
not self.identifier_preparer._requires_quotes(name.lower()):
|
||||
return name.lower()
|
||||
elif name.lower() == name:
|
||||
return quoted_name(name, quote=True)
|
||||
else:
|
||||
return name
|
||||
|
||||
def denormalize_name(self, name):
|
||||
if name is None:
|
||||
return None
|
||||
elif name.lower() == name and \
|
||||
not self.identifier_preparer._requires_quotes(name.lower()):
|
||||
return name.upper()
|
||||
else:
|
||||
return name
|
||||
|
||||
def has_table(self, connection, table_name, schema=None):
|
||||
"""Return ``True`` if the given table exists, ignoring
|
||||
the `schema`."""
|
||||
|
||||
tblqry = """
|
||||
SELECT 1 AS has_table FROM rdb$database
|
||||
WHERE EXISTS (SELECT rdb$relation_name
|
||||
FROM rdb$relations
|
||||
WHERE rdb$relation_name=?)
|
||||
"""
|
||||
c = connection.execute(tblqry, [self.denormalize_name(table_name)])
|
||||
return c.first() is not None
|
||||
|
||||
def has_sequence(self, connection, sequence_name, schema=None):
|
||||
"""Return ``True`` if the given sequence (generator) exists."""
|
||||
|
||||
genqry = """
|
||||
SELECT 1 AS has_sequence FROM rdb$database
|
||||
WHERE EXISTS (SELECT rdb$generator_name
|
||||
FROM rdb$generators
|
||||
WHERE rdb$generator_name=?)
|
||||
"""
|
||||
c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
|
||||
return c.first() is not None
|
||||
|
||||
@reflection.cache
|
||||
def get_table_names(self, connection, schema=None, **kw):
|
||||
# there are two queries commonly mentioned for this.
|
||||
# this one, using view_blr, is at the Firebird FAQ among other places:
|
||||
# http://www.firebirdfaq.org/faq174/
|
||||
s = """
|
||||
select rdb$relation_name
|
||||
from rdb$relations
|
||||
where rdb$view_blr is null
|
||||
and (rdb$system_flag is null or rdb$system_flag = 0);
|
||||
"""
|
||||
|
||||
# the other query is this one. It's not clear if there's really
|
||||
# any difference between these two. This link:
|
||||
# http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
|
||||
# states them as interchangeable. Some discussion at [ticket:2898]
|
||||
# SELECT DISTINCT rdb$relation_name
|
||||
# FROM rdb$relation_fields
|
||||
# WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
|
||||
|
||||
return [self.normalize_name(row[0]) for row in connection.execute(s)]
|
||||
|
||||
@reflection.cache
|
||||
def get_view_names(self, connection, schema=None, **kw):
|
||||
# see http://www.firebirdfaq.org/faq174/
|
||||
s = """
|
||||
select rdb$relation_name
|
||||
from rdb$relations
|
||||
where rdb$view_blr is not null
|
||||
and (rdb$system_flag is null or rdb$system_flag = 0);
|
||||
"""
|
||||
return [self.normalize_name(row[0]) for row in connection.execute(s)]
|
||||
|
||||
@reflection.cache
|
||||
def get_view_definition(self, connection, view_name, schema=None, **kw):
|
||||
qry = """
|
||||
SELECT rdb$view_source AS view_source
|
||||
FROM rdb$relations
|
||||
WHERE rdb$relation_name=?
|
||||
"""
|
||||
rp = connection.execute(qry, [self.denormalize_name(view_name)])
|
||||
row = rp.first()
|
||||
if row:
|
||||
return row['view_source']
|
||||
else:
|
||||
return None
|
||||
|
||||
@reflection.cache
|
||||
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
|
||||
# Query to extract the PK/FK constrained fields of the given table
|
||||
keyqry = """
|
||||
SELECT se.rdb$field_name AS fname
|
||||
FROM rdb$relation_constraints rc
|
||||
JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
|
||||
WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
|
||||
"""
|
||||
tablename = self.denormalize_name(table_name)
|
||||
# get primary key fields
|
||||
c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
|
||||
pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
|
||||
return {'constrained_columns': pkfields, 'name': None}
|
||||
|
||||
@reflection.cache
|
||||
def get_column_sequence(self, connection,
|
||||
table_name, column_name,
|
||||
schema=None, **kw):
|
||||
tablename = self.denormalize_name(table_name)
|
||||
colname = self.denormalize_name(column_name)
|
||||
# Heuristic-query to determine the generator associated to a PK field
|
||||
genqry = """
|
||||
SELECT trigdep.rdb$depended_on_name AS fgenerator
|
||||
FROM rdb$dependencies tabdep
|
||||
JOIN rdb$dependencies trigdep
|
||||
ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
|
||||
AND trigdep.rdb$depended_on_type=14
|
||||
AND trigdep.rdb$dependent_type=2
|
||||
JOIN rdb$triggers trig ON
|
||||
trig.rdb$trigger_name=tabdep.rdb$dependent_name
|
||||
WHERE tabdep.rdb$depended_on_name=?
|
||||
AND tabdep.rdb$depended_on_type=0
|
||||
AND trig.rdb$trigger_type=1
|
||||
AND tabdep.rdb$field_name=?
|
||||
AND (SELECT count(*)
|
||||
FROM rdb$dependencies trigdep2
|
||||
WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
|
||||
"""
|
||||
genr = connection.execute(genqry, [tablename, colname]).first()
|
||||
if genr is not None:
|
||||
return dict(name=self.normalize_name(genr['fgenerator']))
|
||||
|
||||
@reflection.cache
|
||||
def get_columns(self, connection, table_name, schema=None, **kw):
|
||||
# Query to extract the details of all the fields of the given table
|
||||
tblqry = """
|
||||
SELECT r.rdb$field_name AS fname,
|
||||
r.rdb$null_flag AS null_flag,
|
||||
t.rdb$type_name AS ftype,
|
||||
f.rdb$field_sub_type AS stype,
|
||||
f.rdb$field_length/
|
||||
COALESCE(cs.rdb$bytes_per_character,1) AS flen,
|
||||
f.rdb$field_precision AS fprec,
|
||||
f.rdb$field_scale AS fscale,
|
||||
COALESCE(r.rdb$default_source,
|
||||
f.rdb$default_source) AS fdefault
|
||||
FROM rdb$relation_fields r
|
||||
JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
|
||||
JOIN rdb$types t
|
||||
ON t.rdb$type=f.rdb$field_type AND
|
||||
t.rdb$field_name='RDB$FIELD_TYPE'
|
||||
LEFT JOIN rdb$character_sets cs ON
|
||||
f.rdb$character_set_id=cs.rdb$character_set_id
|
||||
WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
|
||||
ORDER BY r.rdb$field_position
|
||||
"""
|
||||
# get the PK, used to determine the eventual associated sequence
|
||||
pk_constraint = self.get_pk_constraint(connection, table_name)
|
||||
pkey_cols = pk_constraint['constrained_columns']
|
||||
|
||||
tablename = self.denormalize_name(table_name)
|
||||
# get all of the fields for this table
|
||||
c = connection.execute(tblqry, [tablename])
|
||||
cols = []
|
||||
while True:
|
||||
row = c.fetchone()
|
||||
if row is None:
|
||||
break
|
||||
name = self.normalize_name(row['fname'])
|
||||
orig_colname = row['fname']
|
||||
|
||||
# get the data type
|
||||
colspec = row['ftype'].rstrip()
|
||||
coltype = self.ischema_names.get(colspec)
|
||||
if coltype is None:
|
||||
util.warn("Did not recognize type '%s' of column '%s'" %
|
||||
(colspec, name))
|
||||
coltype = sqltypes.NULLTYPE
|
||||
elif issubclass(coltype, Integer) and row['fprec'] != 0:
|
||||
coltype = NUMERIC(
|
||||
precision=row['fprec'],
|
||||
scale=row['fscale'] * -1)
|
||||
elif colspec in ('VARYING', 'CSTRING'):
|
||||
coltype = coltype(row['flen'])
|
||||
elif colspec == 'TEXT':
|
||||
coltype = TEXT(row['flen'])
|
||||
elif colspec == 'BLOB':
|
||||
if row['stype'] == 1:
|
||||
coltype = TEXT()
|
||||
else:
|
||||
coltype = BLOB()
|
||||
else:
|
||||
coltype = coltype()
|
||||
|
||||
# does it have a default value?
|
||||
defvalue = None
|
||||
if row['fdefault'] is not None:
|
||||
# the value comes down as "DEFAULT 'value'": there may be
|
||||
# more than one whitespace around the "DEFAULT" keyword
|
||||
# and it may also be lower case
|
||||
# (see also http://tracker.firebirdsql.org/browse/CORE-356)
|
||||
defexpr = row['fdefault'].lstrip()
|
||||
assert defexpr[:8].rstrip().upper() == \
|
||||
'DEFAULT', "Unrecognized default value: %s" % \
|
||||
defexpr
|
||||
defvalue = defexpr[8:].strip()
|
||||
if defvalue == 'NULL':
|
||||
# Redundant
|
||||
defvalue = None
|
||||
col_d = {
|
||||
'name': name,
|
||||
'type': coltype,
|
||||
'nullable': not bool(row['null_flag']),
|
||||
'default': defvalue,
|
||||
'autoincrement': 'auto',
|
||||
}
|
||||
|
||||
if orig_colname.lower() == orig_colname:
|
||||
col_d['quote'] = True
|
||||
|
||||
# if the PK is a single field, try to see if its linked to
|
||||
# a sequence thru a trigger
|
||||
if len(pkey_cols) == 1 and name == pkey_cols[0]:
|
||||
seq_d = self.get_column_sequence(connection, tablename, name)
|
||||
if seq_d is not None:
|
||||
col_d['sequence'] = seq_d
|
||||
|
||||
cols.append(col_d)
|
||||
return cols
|
||||
|
||||
@reflection.cache
|
||||
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
|
||||
# Query to extract the details of each UK/FK of the given table
|
||||
fkqry = """
|
||||
SELECT rc.rdb$constraint_name AS cname,
|
||||
cse.rdb$field_name AS fname,
|
||||
ix2.rdb$relation_name AS targetrname,
|
||||
se.rdb$field_name AS targetfname
|
||||
FROM rdb$relation_constraints rc
|
||||
JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
|
||||
JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
|
||||
JOIN rdb$index_segments cse ON
|
||||
cse.rdb$index_name=ix1.rdb$index_name
|
||||
JOIN rdb$index_segments se
|
||||
ON se.rdb$index_name=ix2.rdb$index_name
|
||||
AND se.rdb$field_position=cse.rdb$field_position
|
||||
WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
|
||||
ORDER BY se.rdb$index_name, se.rdb$field_position
|
||||
"""
|
||||
tablename = self.denormalize_name(table_name)
|
||||
|
||||
c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
|
||||
fks = util.defaultdict(lambda: {
|
||||
'name': None,
|
||||
'constrained_columns': [],
|
||||
'referred_schema': None,
|
||||
'referred_table': None,
|
||||
'referred_columns': []
|
||||
})
|
||||
|
||||
for row in c:
|
||||
cname = self.normalize_name(row['cname'])
|
||||
fk = fks[cname]
|
||||
if not fk['name']:
|
||||
fk['name'] = cname
|
||||
fk['referred_table'] = self.normalize_name(row['targetrname'])
|
||||
fk['constrained_columns'].append(
|
||||
self.normalize_name(row['fname']))
|
||||
fk['referred_columns'].append(
|
||||
self.normalize_name(row['targetfname']))
|
||||
return list(fks.values())
|
||||
|
||||
@reflection.cache
|
||||
def get_indexes(self, connection, table_name, schema=None, **kw):
|
||||
qry = """
|
||||
SELECT ix.rdb$index_name AS index_name,
|
||||
ix.rdb$unique_flag AS unique_flag,
|
||||
ic.rdb$field_name AS field_name
|
||||
FROM rdb$indices ix
|
||||
JOIN rdb$index_segments ic
|
||||
ON ix.rdb$index_name=ic.rdb$index_name
|
||||
LEFT OUTER JOIN rdb$relation_constraints
|
||||
ON rdb$relation_constraints.rdb$index_name =
|
||||
ic.rdb$index_name
|
||||
WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
|
||||
AND rdb$relation_constraints.rdb$constraint_type IS NULL
|
||||
ORDER BY index_name, ic.rdb$field_position
|
||||
"""
|
||||
c = connection.execute(qry, [self.denormalize_name(table_name)])
|
||||
|
||||
indexes = util.defaultdict(dict)
|
||||
for row in c:
|
||||
indexrec = indexes[row['index_name']]
|
||||
if 'name' not in indexrec:
|
||||
indexrec['name'] = self.normalize_name(row['index_name'])
|
||||
indexrec['column_names'] = []
|
||||
indexrec['unique'] = bool(row['unique_flag'])
|
||||
|
||||
indexrec['column_names'].append(
|
||||
self.normalize_name(row['field_name']))
|
||||
|
||||
return list(indexes.values())
|
|
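``get_select_precolumns`` above is why this dialect paginates with FIRST/SKIP rather than LIMIT/OFFSET. A small compile-only sketch; the table and column names are made up and no Firebird server is needed:

    from sqlalchemy import table, column, select
    from sqlalchemy.dialects.firebird.base import FBDialect

    emp = table('employee', column('emp_id'))
    stmt = select([emp.c.emp_id]).limit(5).offset(10)
    print(stmt.compile(dialect=FBDialect()))
    # roughly: SELECT FIRST :param_1 SKIP :param_2 employee.emp_id FROM employee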
@@ -0,0 +1,118 @@
# firebird/fdb.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
.. dialect:: firebird+fdb
|
||||
:name: fdb
|
||||
:dbapi: fdb
|
||||
:connectstring: firebird+fdb://user:password@host:port/path/to/db\
|
||||
[?key=value&key=value...]
|
||||
:url: http://pypi.python.org/pypi/fdb/
|
||||
|
||||
fdb is a kinterbasdb compatible DBAPI for Firebird.
|
||||
|
||||
.. versionadded:: 0.8 - Support for the fdb Firebird driver.
|
||||
|
||||
.. versionchanged:: 0.9 - The fdb dialect is now the default dialect
|
||||
under the ``firebird://`` URL space, as ``fdb`` is now the official
|
||||
Python driver for Firebird.
|
||||
|
||||
Arguments
|
||||
----------
|
||||
|
||||
The ``fdb`` dialect is based on the
:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect; however, it does not
accept every argument that Kinterbasdb does.
|
||||
|
||||
* ``enable_rowcount`` - True by default, setting this to False disables
|
||||
the usage of "cursor.rowcount" with the
|
||||
Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
|
||||
after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
|
||||
ResultProxy will return -1 for result.rowcount. The rationale here is
|
||||
that Kinterbasdb requires a second round trip to the database when
|
||||
.rowcount is called - since SQLA's resultproxy automatically closes
|
||||
the cursor after a non-result-returning statement, rowcount must be
|
||||
called, if at all, before the result object is returned. Additionally,
|
||||
cursor.rowcount may not return correct results with older versions
|
||||
of Firebird, and setting this flag to False will also cause the
|
||||
SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a
|
||||
per-execution basis using the ``enable_rowcount`` option with
|
||||
:meth:`.Connection.execution_options`::
|
||||
|
||||
conn = engine.connect().execution_options(enable_rowcount=True)
|
||||
r = conn.execute(stmt)
|
||||
print r.rowcount
|
||||
|
||||
* ``retaining`` - False by default. Setting this to True will pass the
|
||||
``retaining=True`` keyword argument to the ``.commit()`` and ``.rollback()``
|
||||
methods of the DBAPI connection, which can improve performance in some
|
||||
situations, but apparently with significant caveats.
|
||||
Please read the fdb and/or kinterbasdb DBAPI documentation in order to
|
||||
understand the implications of this flag.
|
||||
|
||||
.. versionadded:: 0.8.2 - ``retaining`` keyword argument specifying
|
||||
transaction retaining behavior - in 0.8 it defaults to ``True``
|
||||
for backwards compatibility.
|
||||
|
||||
.. versionchanged:: 0.9.0 - the ``retaining`` flag defaults to ``False``.
|
||||
In 0.8 it defaulted to ``True``.
|
||||
|
||||
.. seealso::
|
||||
|
||||
http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions
|
||||
- information on the "retaining" flag.
|
||||
|
||||
"""
|
||||
|
||||
from .kinterbasdb import FBDialect_kinterbasdb
|
||||
from ... import util
|
||||
|
||||
|
||||
class FBDialect_fdb(FBDialect_kinterbasdb):
|
||||
|
||||
def __init__(self, enable_rowcount=True,
|
||||
retaining=False, **kwargs):
|
||||
super(FBDialect_fdb, self).__init__(
|
||||
enable_rowcount=enable_rowcount,
|
||||
retaining=retaining, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
return __import__('fdb')
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username='user')
|
||||
if opts.get('port'):
|
||||
opts['host'] = "%s/%s" % (opts['host'], opts['port'])
|
||||
del opts['port']
|
||||
opts.update(url.query)
|
||||
|
||||
util.coerce_kw_type(opts, 'type_conv', int)
|
||||
|
||||
return ([], opts)
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
"""Get the version of the Firebird server used by a connection.
|
||||
|
||||
Returns a tuple of (`major`, `minor`, `build`), three integers
|
||||
representing the version of the attached server.
|
||||
"""
|
||||
|
||||
# This is the simpler approach (the other uses the services api),
|
||||
# that for backward compatibility reasons returns a string like
|
||||
# LI-V6.3.3.12981 Firebird 2.0
|
||||
# where the first version is a fake one resembling the old
|
||||
# Interbase signature.
|
||||
|
||||
isc_info_firebird_version = 103
|
||||
fbconn = connection.connection
|
||||
|
||||
version = fbconn.db_info(isc_info_firebird_version)
|
||||
|
||||
return self._parse_version_info(version)
|
||||
|
||||
dialect = FBDialect_fdb
|
|
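The flags documented above are plain dialect keyword arguments to ``create_engine``. An illustrative sketch; the database path and credentials are placeholders:

    from sqlalchemy import create_engine

    engine = create_engine(
        'firebird+fdb://sysdba:masterkey@localhost/path/to/employee.fdb',
        enable_rowcount=False,  # skip the extra round trip for .rowcount
        retaining=True,         # pass retaining=True to commit()/rollback()
    )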
@@ -0,0 +1,184 @@
# firebird/kinterbasdb.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
.. dialect:: firebird+kinterbasdb
|
||||
:name: kinterbasdb
|
||||
:dbapi: kinterbasdb
|
||||
:connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\
|
||||
[?key=value&key=value...]
|
||||
:url: http://firebirdsql.org/index.php?op=devel&sub=python
|
||||
|
||||
Arguments
|
||||
----------
|
||||
|
||||
The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining``
|
||||
arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect.
|
||||
In addition, it also accepts the following:
|
||||
|
||||
* ``type_conv`` - select the kind of mapping done on the types: by default
|
||||
SQLAlchemy uses 200 with Unicode, datetime and decimal support. See
|
||||
the linked documents below for further information.
|
||||
|
||||
* ``concurrency_level`` - set the backend policy with regards to threading
|
||||
issues: by default SQLAlchemy uses policy 1. See the linked documents
|
||||
below for further information.
|
||||
|
||||
.. seealso::
|
||||
|
||||
http://sourceforge.net/projects/kinterbasdb
|
||||
|
||||
http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation
|
||||
|
||||
http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency
|
||||
|
||||
"""
|
||||
|
||||
from .base import FBDialect, FBExecutionContext
|
||||
from ... import util, types as sqltypes
|
||||
from re import match
|
||||
import decimal
|
||||
|
||||
|
||||
class _kinterbasdb_numeric(object):
|
||||
def bind_processor(self, dialect):
|
||||
def process(value):
|
||||
if isinstance(value, decimal.Decimal):
|
||||
return str(value)
|
||||
else:
|
||||
return value
|
||||
return process
|
||||
|
||||
|
||||
class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
|
||||
pass
|
||||
|
||||
|
||||
class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
|
||||
pass
|
||||
|
||||
|
||||
class FBExecutionContext_kinterbasdb(FBExecutionContext):
|
||||
@property
|
||||
def rowcount(self):
|
||||
if self.execution_options.get('enable_rowcount',
|
||||
self.dialect.enable_rowcount):
|
||||
return self.cursor.rowcount
|
||||
else:
|
||||
return -1
|
||||
|
||||
|
||||
class FBDialect_kinterbasdb(FBDialect):
|
||||
driver = 'kinterbasdb'
|
||||
supports_sane_rowcount = False
|
||||
supports_sane_multi_rowcount = False
|
||||
execution_ctx_cls = FBExecutionContext_kinterbasdb
|
||||
|
||||
supports_native_decimal = True
|
||||
|
||||
colspecs = util.update_copy(
|
||||
FBDialect.colspecs,
|
||||
{
|
||||
sqltypes.Numeric: _FBNumeric_kinterbasdb,
|
||||
sqltypes.Float: _FBFloat_kinterbasdb,
|
||||
}
|
||||
|
||||
)
|
||||
|
||||
def __init__(self, type_conv=200, concurrency_level=1,
|
||||
enable_rowcount=True,
|
||||
retaining=False, **kwargs):
|
||||
super(FBDialect_kinterbasdb, self).__init__(**kwargs)
|
||||
self.enable_rowcount = enable_rowcount
|
||||
self.type_conv = type_conv
|
||||
self.concurrency_level = concurrency_level
|
||||
self.retaining = retaining
|
||||
if enable_rowcount:
|
||||
self.supports_sane_rowcount = True
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
return __import__('kinterbasdb')
|
||||
|
||||
def do_execute(self, cursor, statement, parameters, context=None):
|
||||
# kinterbasdb does not accept None, but wants an empty list
# when there are no arguments.
|
||||
cursor.execute(statement, parameters or [])
|
||||
|
||||
def do_rollback(self, dbapi_connection):
|
||||
dbapi_connection.rollback(self.retaining)
|
||||
|
||||
def do_commit(self, dbapi_connection):
|
||||
dbapi_connection.commit(self.retaining)
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username='user')
|
||||
if opts.get('port'):
|
||||
opts['host'] = "%s/%s" % (opts['host'], opts['port'])
|
||||
del opts['port']
|
||||
opts.update(url.query)
|
||||
|
||||
util.coerce_kw_type(opts, 'type_conv', int)
|
||||
|
||||
type_conv = opts.pop('type_conv', self.type_conv)
|
||||
concurrency_level = opts.pop('concurrency_level',
|
||||
self.concurrency_level)
|
||||
|
||||
if self.dbapi is not None:
|
||||
initialized = getattr(self.dbapi, 'initialized', None)
|
||||
if initialized is None:
|
||||
# CVS rev 1.96 changed the name of the attribute:
|
||||
# http://kinterbasdb.cvs.sourceforge.net/viewvc/kinterbasdb/
|
||||
# Kinterbasdb-3.0/__init__.py?r1=1.95&r2=1.96
|
||||
initialized = getattr(self.dbapi, '_initialized', False)
|
||||
if not initialized:
|
||||
self.dbapi.init(type_conv=type_conv,
|
||||
concurrency_level=concurrency_level)
|
||||
return ([], opts)
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
"""Get the version of the Firebird server used by a connection.
|
||||
|
||||
Returns a tuple of (`major`, `minor`, `build`), three integers
|
||||
representing the version of the attached server.
|
||||
"""
|
||||
|
||||
# This is the simpler approach (the other uses the services api),
|
||||
# that for backward compatibility reasons returns a string like
|
||||
# LI-V6.3.3.12981 Firebird 2.0
|
||||
# where the first version is a fake one resembling the old
|
||||
# Interbase signature.
|
||||
|
||||
fbconn = connection.connection
|
||||
version = fbconn.server_version
|
||||
|
||||
return self._parse_version_info(version)
|
||||
|
||||
def _parse_version_info(self, version):
|
||||
m = match(
|
||||
r'\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
|
||||
if not m:
|
||||
raise AssertionError(
|
||||
"Could not determine version from string '%s'" % version)
|
||||
|
||||
if m.group(5) is not None:
|
||||
return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird'])
|
||||
else:
|
||||
return tuple([int(x) for x in m.group(1, 2, 3)] + ['interbase'])
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if isinstance(e, (self.dbapi.OperationalError,
|
||||
self.dbapi.ProgrammingError)):
|
||||
msg = str(e)
|
||||
return ('Unable to complete network request to host' in msg or
|
||||
'Invalid connection state' in msg or
|
||||
'Invalid cursor state' in msg or
|
||||
'connection shutdown' in msg)
|
||||
else:
|
||||
return False
|
||||
|
||||
dialect = FBDialect_kinterbasdb
|
|
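To make ``_parse_version_info`` concrete: for the sample compatibility string quoted in the comments above, the regex takes the trailing 'Firebird 2.0' branch.

    import re

    m = re.match(r'\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?',
                 'LI-V6.3.3.12981 Firebird 2.0')
    # group 5 matched, so groups 6, 7 and 4 are used
    print(tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird']))
    # -> (2, 0, 12981, 'firebird')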
@ -0,0 +1,26 @@
|
|||
# mssql/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, pyodbc, adodbapi, pymssql, zxjdbc, mxodbc  # noqa

from .base import \
    INTEGER, BIGINT, SMALLINT, TINYINT, VARCHAR, NVARCHAR, CHAR, \
    NCHAR, TEXT, NTEXT, DECIMAL, NUMERIC, FLOAT, DATETIME, \
    DATETIME2, DATETIMEOFFSET, DATE, TIME, SMALLDATETIME, \
    BINARY, VARBINARY, BIT, REAL, IMAGE, TIMESTAMP, ROWVERSION, \
    MONEY, SMALLMONEY, UNIQUEIDENTIFIER, SQL_VARIANT, XML

base.dialect = dialect = pyodbc.dialect


__all__ = (
    'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
    'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME',
    'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
    'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP', 'ROWVERSION',
    'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'XML', 'dialect'
)
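

# A small usage sketch (URL and DSN name assumed): because ``base.dialect``
# above is pyodbc's dialect, a plain ``mssql://`` URL resolves to the
# PyODBC DBAPI:
#
#     from sqlalchemy import create_engine
#     engine = create_engine("mssql://scott:tiger@some_dsn")
#     # equivalent to "mssql+pyodbc://scott:tiger@some_dsn"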
@@ -0,0 +1,87 @@
# mssql/adodbapi.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+adodbapi
    :name: adodbapi
    :dbapi: adodbapi
    :connectstring: mssql+adodbapi://<username>:<password>@<dsnname>
    :url: http://adodbapi.sourceforge.net/

.. note::

    The adodbapi dialect is not implemented for SQLAlchemy versions 0.6 and
    above at this time.

"""
import datetime
from sqlalchemy import types as sqltypes, util
from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
import sys


class MSDateTime_adodbapi(MSDateTime):
    def result_processor(self, dialect, coltype):
        def process(value):
            # adodbapi will return datetimes with empty time
            # values as datetime.date() objects.
            # Promote them back to full datetime.datetime()
            if type(value) is datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            return value
        return process


class MSDialect_adodbapi(MSDialect):
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    supports_unicode = sys.maxunicode == 65535
    supports_unicode_statements = True
    driver = 'adodbapi'

    @classmethod
    def import_dbapi(cls):
        import adodbapi as module
        return module

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.DateTime: MSDateTime_adodbapi
        }
    )

    def create_connect_args(self, url):
        def check_quote(token):
            if ";" in str(token):
                token = "'%s'" % token
            return token

        keys = dict(
            (k, check_quote(v)) for k, v in url.query.items()
        )

        connectors = ["Provider=SQLOLEDB"]
        if 'port' in keys:
            connectors.append("Data Source=%s, %s" %
                              (keys.get("host"), keys.get("port")))
        else:
            connectors.append("Data Source=%s" % keys.get("host"))
        connectors.append("Initial Catalog=%s" % keys.get("database"))
        user = keys.get("user")
        if user:
            connectors.append("User Id=%s" % user)
            connectors.append("Password=%s" % keys.get("password", ""))
        else:
            connectors.append("Integrated Security=SSPI")
        return [[";".join(connectors)], {}]

    def is_disconnect(self, e, connection, cursor):
        return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \
            "'connection failure'" in str(e)

dialect = MSDialect_adodbapi
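
# Illustration of what create_connect_args() above produces (all query
# values assumed): a URL such as
#
#     mssql+adodbapi:///?host=dbhost&port=1433&database=mydb&user=sa&password=pw
#
# yields the single ADO connection string
#
#     Provider=SQLOLEDB;Data Source=dbhost, 1433;Initial Catalog=mydb;User Id=sa;Password=pw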
File diff suppressed because it is too large
@@ -0,0 +1,137 @@
# mssql/information_schema.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

# TODO: should be using the sys. catalog with SQL Server, not information
# schema

from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
from ... import cast
from ... import util
from ...sql import expression
from ...ext.compiler import compiles

ischema = MetaData()


class CoerceUnicode(TypeDecorator):
    impl = Unicode

    def process_bind_param(self, value, dialect):
        if util.py2k and isinstance(value, util.binary_type):
            value = value.decode(dialect.encoding)
        return value

    def bind_expression(self, bindvalue):
        return _cast_on_2005(bindvalue)


class _cast_on_2005(expression.ColumnElement):
    def __init__(self, bindvalue):
        self.bindvalue = bindvalue


@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
    from . import base
    if compiler.dialect.server_version_info is None or \
            compiler.dialect.server_version_info < base.MS_2005_VERSION:
        return compiler.process(element.bindvalue, **kw)
    else:
        return compiler.process(cast(element.bindvalue, Unicode), **kw)

schemata = Table("SCHEMATA", ischema,
                 Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
                 Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
                 Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
                 schema="INFORMATION_SCHEMA")

tables = Table("TABLES", ischema,
               Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
               Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
               Column("TABLE_NAME", CoerceUnicode, key="table_name"),
               Column(
                   "TABLE_TYPE", String(convert_unicode=True),
                   key="table_type"),
               schema="INFORMATION_SCHEMA")

columns = Table("COLUMNS", ischema,
                Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
                Column("TABLE_NAME", CoerceUnicode, key="table_name"),
                Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
                Column("IS_NULLABLE", Integer, key="is_nullable"),
                Column("DATA_TYPE", String, key="data_type"),
                Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
                Column("CHARACTER_MAXIMUM_LENGTH", Integer,
                       key="character_maximum_length"),
                Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
                Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
                Column("COLUMN_DEFAULT", Integer, key="column_default"),
                Column("COLLATION_NAME", String, key="collation_name"),
                schema="INFORMATION_SCHEMA")

constraints = Table("TABLE_CONSTRAINTS", ischema,
                    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
                    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
                    Column("CONSTRAINT_NAME", CoerceUnicode,
                           key="constraint_name"),
                    Column("CONSTRAINT_TYPE", String(
                        convert_unicode=True), key="constraint_type"),
                    schema="INFORMATION_SCHEMA")

column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
                           Column("TABLE_SCHEMA", CoerceUnicode,
                                  key="table_schema"),
                           Column("TABLE_NAME", CoerceUnicode,
                                  key="table_name"),
                           Column("COLUMN_NAME", CoerceUnicode,
                                  key="column_name"),
                           Column("CONSTRAINT_NAME", CoerceUnicode,
                                  key="constraint_name"),
                           schema="INFORMATION_SCHEMA")

key_constraints = Table("KEY_COLUMN_USAGE", ischema,
                        Column("TABLE_SCHEMA", CoerceUnicode,
                               key="table_schema"),
                        Column("TABLE_NAME", CoerceUnicode,
                               key="table_name"),
                        Column("COLUMN_NAME", CoerceUnicode,
                               key="column_name"),
                        Column("CONSTRAINT_NAME", CoerceUnicode,
                               key="constraint_name"),
                        Column("ORDINAL_POSITION", Integer,
                               key="ordinal_position"),
                        schema="INFORMATION_SCHEMA")

ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
                        Column("CONSTRAINT_CATALOG", CoerceUnicode,
                               key="constraint_catalog"),
                        Column("CONSTRAINT_SCHEMA", CoerceUnicode,
                               key="constraint_schema"),
                        Column("CONSTRAINT_NAME", CoerceUnicode,
                               key="constraint_name"),
                        # TODO: is CATLOG misspelled ?
                        Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
                               key="unique_constraint_catalog"),

                        Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
                               key="unique_constraint_schema"),
                        Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
                               key="unique_constraint_name"),
                        Column("MATCH_OPTION", String, key="match_option"),
                        Column("UPDATE_RULE", String, key="update_rule"),
                        Column("DELETE_RULE", String, key="delete_rule"),
                        schema="INFORMATION_SCHEMA")

views = Table("VIEWS", ischema,
              Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
              Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
              Column("TABLE_NAME", CoerceUnicode, key="table_name"),
              Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
              Column("CHECK_OPTION", String, key="check_option"),
              Column("IS_UPDATABLE", String, key="is_updatable"),
              schema="INFORMATION_SCHEMA")
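
# A reflection sketch against the Table objects above (table name assumed);
# the dialect issues queries of this shape when reflecting SQL Server
# metadata from INFORMATION_SCHEMA:
#
#     from sqlalchemy import select
#
#     stmt = select([columns.c.column_name, columns.c.data_type]).where(
#         columns.c.table_name == 'my_table')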
@@ -0,0 +1,139 @@
# mssql/mxodbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+mxodbc
    :name: mxODBC
    :dbapi: mxodbc
    :connectstring: mssql+mxodbc://<username>:<password>@<dsnname>
    :url: http://www.egenix.com/

Execution Modes
---------------

mxODBC features two styles of statement execution, using the
``cursor.execute()`` and ``cursor.executedirect()`` methods (the second being
an extension to the DBAPI specification). The former makes use of a particular
API call specific to the SQL Server Native Client ODBC driver known as
SQLDescribeParam, while the latter does not.

mxODBC apparently only makes repeated use of a single prepared statement
when SQLDescribeParam is used. The advantage to prepared statement reuse is
one of performance. The disadvantage is that SQLDescribeParam has a limited
set of scenarios in which bind parameters are understood, including that they
cannot be placed within the argument lists of function calls, anywhere outside
the FROM, or even within subqueries within the FROM clause - making the usage
of bind parameters within SELECT statements impossible for all but the most
simplistic statements.

For this reason, the mxODBC dialect uses the "native" mode by default only for
INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for
all other statements.

This behavior can be controlled via
:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the
``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a
value of ``True`` will unconditionally use native bind parameters and a value
of ``False`` will unconditionally use string-escaped parameters.

"""

from ... import types as sqltypes
from ...connectors.mxodbc import MxODBCConnector
from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc
from .base import (MSDialect,
                   MSSQLStrictCompiler,
                   VARBINARY,
                   _MSDateTime, _MSDate, _MSTime)


class _MSNumeric_mxodbc(_MSNumeric_pyodbc):
    """Include pyodbc's numeric processor.
    """


class _MSDate_mxodbc(_MSDate):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                return "%s-%s-%s" % (value.year, value.month, value.day)
            else:
                return None
        return process


class _MSTime_mxodbc(_MSTime):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                return "%s:%s:%s" % (value.hour, value.minute, value.second)
            else:
                return None
        return process


class _VARBINARY_mxodbc(VARBINARY):

    """
    mxODBC Support for VARBINARY column types.

    This handles the special case for null VARBINARY values,
    which maps None values to the mx.ODBC.Manager.BinaryNull symbol.
    """

    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # should pull from mx.ODBC.Manager.BinaryNull
                return dialect.dbapi.BinaryNull
        return process


class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
    """
    The pyodbc execution context is useful for enabling
    SELECT SCOPE_IDENTITY in cases where OUTPUT clause
    does not work (tables with insert triggers).
    """
    # todo - investigate whether the pyodbc execution context
    #        is really only being used in cases where OUTPUT
    #        won't work.


class MSDialect_mxodbc(MxODBCConnector, MSDialect):

    # this is only needed if "native ODBC" mode is used,
    # which is now disabled by default.
    # statement_compiler = MSSQLStrictCompiler

    execution_ctx_cls = MSExecutionContext_mxodbc

    # flag used by _MSNumeric_mxodbc
    _need_decimal_fix = True

    colspecs = {
        sqltypes.Numeric: _MSNumeric_mxodbc,
        sqltypes.DateTime: _MSDateTime,
        sqltypes.Date: _MSDate_mxodbc,
        sqltypes.Time: _MSTime_mxodbc,
        VARBINARY: _VARBINARY_mxodbc,
        sqltypes.LargeBinary: _VARBINARY_mxodbc,
    }

    def __init__(self, description_encoding=None, **params):
        super(MSDialect_mxodbc, self).__init__(**params)
        self.description_encoding = description_encoding

dialect = MSDialect_mxodbc
@@ -0,0 +1,116 @@
# mssql/pymssql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+pymssql
    :name: pymssql
    :dbapi: pymssql
    :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>/?\
charset=utf8
    :url: http://pymssql.org/

pymssql is a Python module that provides a Python DBAPI interface around
`FreeTDS <http://www.freetds.org/>`_.  Compatible builds are available for
Linux, MacOSX and Windows platforms.

Modern versions of this driver work very well with SQL Server and
FreeTDS from Linux and are highly recommended.

"""
from .base import MSDialect, MSIdentifierPreparer
from ... import types as sqltypes, util, processors
import re


class _MSNumeric_pymssql(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            return processors.to_float
        else:
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class MSIdentifierPreparer_pymssql(MSIdentifierPreparer):

    def __init__(self, dialect):
        super(MSIdentifierPreparer_pymssql, self).__init__(dialect)
        # pymssql has the very unusual behavior that it uses pyformat
        # yet does not require that percent signs be doubled
        self._double_percents = False


class MSDialect_pymssql(MSDialect):
    supports_native_decimal = True
    driver = 'pymssql'

    preparer = MSIdentifierPreparer_pymssql

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pymssql,
            sqltypes.Float: sqltypes.Float,
        }
    )

    @classmethod
    def dbapi(cls):
        module = __import__('pymssql')
        # pymssql < 2.1.1 doesn't have a Binary method.  we use string
        client_ver = tuple(int(x) for x in module.__version__.split("."))
        if client_ver < (2, 1, 1):
            # TODO: monkeypatching here is less than ideal
            module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)

        if client_ver < (1, ):
            util.warn("The pymssql dialect expects at least "
                      "the 1.0 series of the pymssql DBAPI.")
        return module

    def _get_server_version_info(self, connection):
        vers = connection.scalar("select @@version")
        m = re.match(
            r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3, 4))
        else:
            return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        port = opts.pop('port', None)
        if port and 'host' in opts:
            opts['host'] = "%s:%s" % (opts['host'], port)
        return [[], opts]

    def is_disconnect(self, e, connection, cursor):
        for msg in (
            "Adaptive Server connection timed out",
            "Net-Lib error during Connection reset by peer",
            "message 20003",  # connection timeout
            "Error 10054",
            "Not connected to any MS SQL server",
            "Connection is closed",
            "message 20006",  # Write to the server failed
            "message 20017",  # Unexpected EOF from the server
        ):
            if msg in str(e):
                return True
        else:
            return False

    def set_isolation_level(self, connection, level):
        if level == 'AUTOCOMMIT':
            connection.autocommit(True)
        else:
            connection.autocommit(False)
            super(MSDialect_pymssql, self).set_isolation_level(connection,
                                                               level)


dialect = MSDialect_pymssql
@@ -0,0 +1,310 @@
# mssql/pyodbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: mssql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
    :url: http://pypi.python.org/pypi/pyodbc/

Connecting to PyODBC
--------------------

The URL here is to be translated to PyODBC connection strings, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.

DSN Connections
^^^^^^^^^^^^^^^

A DSN-based connection is **preferred** overall when using ODBC.  A
basic DSN-based connection looks like::

    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

The URL above will pass the following connection string to PyODBC::

    dsn=mydsn;UID=user;PWD=pass

If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.

Hostname Connections
^^^^^^^^^^^^^^^^^^^^

Hostname-based connections are **not preferred**, however they are supported.
The ODBC driver name must be explicitly specified::

    engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0")

.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the
    SQL Server driver name specified explicitly.  SQLAlchemy cannot
    choose an optimal default here as it varies based on platform
    and installed drivers.

Other keywords interpreted by the Pyodbc dialect to be passed to
``pyodbc.connect()`` in both the DSN and hostname cases include:
``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.

Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A PyODBC connection string can also be sent exactly as specified in
`ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_
into the driver using the parameter ``odbc_connect``.  The delimiters must
be URL escaped, however, as illustrated below using ``urllib.quote_plus``::

    import urllib
    params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password")

    engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)


Driver / Unicode Support
-------------------------

PyODBC works best with Microsoft ODBC drivers, particularly in the area
of Unicode support on both Python 2 and Python 3.

Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not**
recommended; there have been historically many Unicode-related issues
in this area, including before Microsoft offered ODBC drivers for Linux
and OSX.  Now that Microsoft offers drivers for all platforms, for
PyODBC support these are recommended.  FreeTDS remains relevant for
non-ODBC drivers such as pymssql where it works very well.


Rowcount Support
----------------

Pyodbc only has partial support for rowcount.  See the notes at
:ref:`mssql_rowcount_versioning` for important notes when using ORM
versioning.

"""

from .base import MSExecutionContext, MSDialect, BINARY, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util, exc
import decimal
import re


class _ms_numeric_pyodbc(object):

    """Turns Decimals with adjusted() < 0 or > 7 into strings.

    The routines here are needed for older pyodbc versions
    as well as current mxODBC versions.

    """

    def bind_processor(self, dialect):

        super_process = super(_ms_numeric_pyodbc, self).\
            bind_processor(dialect)

        if not dialect._need_decimal_fix:
            return super_process

        def process(value):
            if self.asdecimal and \
                    isinstance(value, decimal.Decimal):
                adjusted = value.adjusted()
                if adjusted < 0:
                    return self._small_dec_to_string(value)
                elif adjusted > 7:
                    return self._large_dec_to_string(value)

            if super_process:
                return super_process(value)
            else:
                return value
        return process

    # these routines needed for older versions of pyodbc.
    # as of 2.1.8 this logic is integrated.

    def _small_dec_to_string(self, value):
        return "%s0.%s%s" % (
            (value < 0 and '-' or ''),
            '0' * (abs(value.adjusted()) - 1),
            "".join([str(nint) for nint in value.as_tuple()[1]]))

    def _large_dec_to_string(self, value):
        _int = value.as_tuple()[1]
        if 'E' in str(value):
            result = "%s%s%s" % (
                (value < 0 and '-' or ''),
                "".join([str(s) for s in _int]),
                "0" * (value.adjusted() - (len(_int) - 1)))
        else:
            if (len(_int) - 1) > value.adjusted():
                result = "%s%s.%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]),
                    "".join(
                        [str(s) for s in _int][value.adjusted() + 1:]))
            else:
                result = "%s%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]))
        return result
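
    # Worked examples of the two conversions above (input values assumed,
    # a sketch of the logic rather than exhaustive cases):
    #   Decimal('1E+10')   -> adjusted() == 10 > 7  -> '10000000000'
    #   Decimal('0.00001') -> adjusted() == -5 < 0  -> '0.00001'
    # i.e. scientific notation is expanded into a plain digit string that
    # the ODBC layer can pass through as a numeric literal.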


class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    pass


class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    pass


class _ms_binary_pyodbc(object):
    """Wraps binary values in dialect-specific Binary wrapper.
    If the value is null, return a pyodbc-specific BinaryNull
    object to prevent pyODBC [and FreeTDS] from defaulting binary
    NULL types to SQLWCHAR and causing implicit conversion errors.
    """

    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # pyodbc-specific
                return dialect.dbapi.BinaryNull
        return process


class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY):
    pass


class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY):
    pass


class MSExecutionContext_pyodbc(MSExecutionContext):
    _embedded_scope_identity = False

    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.

        Background on why "scope_identity()" is preferable to "@@identity":
        http://msdn.microsoft.com/en-us/library/ms190315.aspx

        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?

        """

        super(MSExecutionContext_pyodbc, self).pre_exec()

        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if self._select_lastrowid and \
                self.dialect.use_scope_identity and \
                len(self.parameters[0]):
            self._embedded_scope_identity = True

            self.statement += "; select scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error as e:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()

            self._lastrowid = int(row[0])
        else:
            super(MSExecutionContext_pyodbc, self).post_exec()


class MSDialect_pyodbc(PyODBCConnector, MSDialect):

    execution_ctx_cls = MSExecutionContext_pyodbc

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            BINARY: _BINARY_pyodbc,

            # SQL Server dialect has a VARBINARY that is just to support
            # "deprecate_large_types" w/ VARBINARY(max), but also we must
            # handle the usual SQL standard VARBINARY
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
        }
    )

    def __init__(self, description_encoding=None, **params):
        if 'description_encoding' in params:
            self.description_encoding = params.pop('description_encoding')
        super(MSDialect_pyodbc, self).__init__(**params)
        self.use_scope_identity = self.use_scope_identity and \
            self.dbapi and \
            hasattr(self.dbapi.Cursor, 'nextset')
        self._need_decimal_fix = self.dbapi and \
            self._dbapi_version() < (2, 1, 8)

    def _get_server_version_info(self, connection):
        try:
            # "Version of the instance of SQL Server, in the form
            # of 'major.minor.build.revision'"
            raw = connection.scalar(
                "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)")
        except exc.DBAPIError:
            # SQL Server docs indicate this function isn't present prior to
            # 2008.  Before we had the VARCHAR cast above, pyodbc would also
            # fail on this query.
            return super(MSDialect_pyodbc, self).\
                _get_server_version_info(connection, allow_chars=False)
        else:
            version = []
            r = re.compile(r'[.\-]')
            for n in r.split(raw):
                try:
                    version.append(int(n))
                except ValueError:
                    pass
            return tuple(version)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            for code in (
                    '08S01', '01002', '08003', '08007',
                    '08S02', '08001', 'HYT00', 'HY010',
                    '10054'):
                if code in str(e):
                    return True
        return super(MSDialect_pyodbc, self).is_disconnect(
            e, connection, cursor)

dialect = MSDialect_pyodbc
@@ -0,0 +1,69 @@
# mssql/zxjdbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: mssql+zxjdbc://user:pass@host:port/dbname\
[?key=value&key=value...]
    :driverurl: http://jtds.sourceforge.net/

.. note:: Jython is not supported by current versions of SQLAlchemy.  The
   zxjdbc dialect should be considered as experimental.

"""
from ...connectors.zxJDBC import ZxJDBCConnector
from .base import MSDialect, MSExecutionContext
from ... import engine


class MSExecutionContext_zxjdbc(MSExecutionContext):

    _embedded_scope_identity = False

    def pre_exec(self):
        super(MSExecutionContext_zxjdbc, self).pre_exec()
        # scope_identity after the fact returns null in jTDS so we must
        # embed it
        if self._select_lastrowid and self.dialect.use_scope_identity:
            self._embedded_scope_identity = True
            self.statement += "; SELECT scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            while True:
                try:
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error:
                    self.cursor.nextset()
            self._lastrowid = int(row[0])

        if (self.isinsert or self.isupdate or self.isdelete) and \
                self.compiled.returning:
            self._result_proxy = engine.FullyBufferedResultProxy(self)

        if self._enable_identity_insert:
            table = self.dialect.identifier_preparer.format_table(
                self.compiled.statement.table)
            self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table)


class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
    jdbc_db_name = 'jtds:sqlserver'
    jdbc_driver_name = 'net.sourceforge.jtds.jdbc.Driver'

    execution_ctx_cls = MSExecutionContext_zxjdbc

    def _get_server_version_info(self, connection):
        return tuple(
            int(x)
            for x in connection.connection.dbversion.split('.')
        )

dialect = MSDialect_zxjdbc
@@ -0,0 +1,34 @@
# mysql/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, mysqldb, oursql, \
    pyodbc, zxjdbc, mysqlconnector, pymysql, \
    gaerdbms, cymysql

from .base import \
    BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
    DECIMAL, DOUBLE, ENUM, DECIMAL, \
    FLOAT, INTEGER, INTEGER, JSON, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
    MEDIUMINT, MEDIUMTEXT, NCHAR, \
    NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
    TINYBLOB, TINYINT, TINYTEXT, \
    VARBINARY, VARCHAR, YEAR

from .dml import insert, Insert

# default dialect
base.dialect = dialect = mysqldb.dialect


__all__ = (
    'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
    'DECIMAL', 'DOUBLE', 'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER',
    'JSON', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT',
    'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
    'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR',
    'YEAR', 'dialect'
)
File diff suppressed because it is too large
@@ -0,0 +1,76 @@
# mysql/cymysql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+cymysql
    :name: CyMySQL
    :dbapi: cymysql
    :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
    :url: https://github.com/nakagami/CyMySQL

"""
import re

from .mysqldb import MySQLDialect_mysqldb
from .base import (BIT, MySQLDialect)
from ... import util


class _cymysqlBIT(BIT):
    def result_processor(self, dialect, coltype):
        """Convert MySQL's 64 bit, variable length binary string to a long.
        """

        def process(value):
            if value is not None:
                v = 0
                for i in util.iterbytes(value):
                    v = v << 8 | i
                return v
            return value
        return process
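
# Worked example of the shift-and-or loop above (input value assumed):
#   b'\x01\x00'  ->  v = (0 << 8 | 0x01) == 1, then (1 << 8 | 0x00) == 256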


class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    driver = 'cymysql'

    description_encoding = None
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _cymysqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        return __import__('cymysql')

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.OperationalError):
            return self._extract_error_code(e) in \
                (2006, 2013, 2014, 2045, 2055)
        elif isinstance(e, self.dbapi.InterfaceError):
            # if underlying connection is closed,
            # this is the error you get
            return True
        else:
            return False

dialect = MySQLDialect_cymysql
@@ -0,0 +1,80 @@
from ...sql.elements import ClauseElement
from ...sql.dml import Insert as StandardInsert
from ...sql.expression import alias
from ...util.langhelpers import public_factory
from ...sql.base import _generative
from ... import util

__all__ = ('Insert', 'insert')


class Insert(StandardInsert):
    """MySQL-specific implementation of INSERT.

    Adds methods for MySQL-specific syntaxes such as ON DUPLICATE KEY UPDATE.

    .. versionadded:: 1.2

    """

    @property
    def inserted(self):
        """Provide the "inserted" namespace for an ON DUPLICATE KEY UPDATE
        statement.

        MySQL's ON DUPLICATE KEY UPDATE clause allows reference to the row
        that would be inserted, via a special function called ``VALUES()``.
        This attribute provides all columns in this row to be referenceable
        such that they will render within a ``VALUES()`` function inside the
        ON DUPLICATE KEY UPDATE clause.  The attribute is named ``.inserted``
        so as not to conflict with the existing :meth:`.Insert.values` method.

        .. seealso::

            :ref:`mysql_insert_on_duplicate_key_update` - example of how
            to use :attr:`.Insert.inserted`

        """
        return self.inserted_alias.columns

    @util.memoized_property
    def inserted_alias(self):
        return alias(self.table, name='inserted')

    @_generative
    def on_duplicate_key_update(self, **kw):
        r"""
        Specifies the ON DUPLICATE KEY UPDATE clause.

        :param \**kw: Column keys linked to UPDATE values.  The
         values may be any SQL expression or supported literal Python
         values.

        .. warning:: This dictionary does **not** take into account
           Python-specified default UPDATE values or generation functions,
           e.g. those specified using :paramref:`.Column.onupdate`.
           These values will not be exercised for an ON DUPLICATE KEY UPDATE
           style of UPDATE, unless values are manually specified here.

        .. versionadded:: 1.2

        .. seealso::

            :ref:`mysql_insert_on_duplicate_key_update`

        """
        inserted_alias = getattr(self, 'inserted_alias', None)
        self._post_values_clause = OnDuplicateClause(inserted_alias, kw)
        return self


insert = public_factory(Insert, '.dialects.mysql.insert')
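
# A minimal usage sketch for the construct above; the table ``t`` (with a
# unique ``id`` column) and ``conn`` are assumed:
#
#     from sqlalchemy.dialects.mysql import insert
#
#     stmt = insert(t).values(id=1, data='inserted value')
#     stmt = stmt.on_duplicate_key_update(data=stmt.inserted.data)
#     conn.execute(stmt)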


class OnDuplicateClause(ClauseElement):
    __visit_name__ = 'on_duplicate_key_update'

    def __init__(self, inserted_alias, update):
        self.inserted_alias = inserted_alias
        if not update or not isinstance(update, dict):
            raise ValueError(
                'update parameter must be a non-empty dictionary')
        self.update = update
@@ -0,0 +1,311 @@
# mysql/enumerated.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re

from .types import _StringType
from ... import exc, sql, util
from ...sql import sqltypes


class _EnumeratedValues(_StringType):
    def _init_values(self, values, kw):
        self.quoting = kw.pop('quoting', 'auto')

        if self.quoting == 'auto' and len(values):
            # What quoting character are we using?
            q = None
            for e in values:
                if len(e) == 0:
                    self.quoting = 'unquoted'
                    break
                elif q is None:
                    q = e[0]

                if len(e) == 1 or e[0] != q or e[-1] != q:
                    self.quoting = 'unquoted'
                    break
            else:
                self.quoting = 'quoted'

        if self.quoting == 'quoted':
            util.warn_deprecated(
                'Manually quoting %s value literals is deprecated.  Supply '
                'unquoted values and use the quoting= option in cases of '
                'ambiguity.' % self.__class__.__name__)

            values = self._strip_values(values)

        self._enumerated_values = values
        length = max([len(v) for v in values] + [0])
        return values, length

    @classmethod
    def _strip_values(cls, values):
        strip_values = []
        for a in values:
            if a[0:1] == '"' or a[0:1] == "'":
                # strip enclosing quotes and unquote interior
                a = a[1:-1].replace(a[0] * 2, a[0])
            strip_values.append(a)
        return strip_values


class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _EnumeratedValues):
    """MySQL ENUM type."""

    __visit_name__ = 'ENUM'

    native_enum = True

    def __init__(self, *enums, **kw):
        """Construct an ENUM.

        E.g.::

          Column('myenum', ENUM("foo", "bar", "baz"))

        :param enums: The range of valid values for this ENUM.  Values will be
          quoted when generating the schema according to the quoting flag (see
          below).  This object may also be a PEP-435-compliant enumerated
          type.

          .. versionadded:: 1.1 added support for PEP-435-compliant enumerated
             types.

        :param strict: This flag has no effect.

         .. versionchanged:: The MySQL ENUM type as well as the base Enum
            type now validates all Python data values.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        :param quoting: Defaults to 'auto': automatically determine enum value
          quoting.  If all enum values are surrounded by the same quoting
          character, then use 'quoted' mode.  Otherwise, use 'unquoted' mode.

          'quoted': values in enums are already quoted, they will be used
          directly when generating the schema - this usage is deprecated.

          'unquoted': values in enums are not quoted, they will be escaped and
          surrounded by single quotes when generating the schema.

          Previous versions of this type always required manually quoted
          values to be supplied; future versions will always quote the string
          literals for you.  This is a transitional option.

        """

        kw.pop('strict', None)
        self._enum_init(enums, kw)
        _StringType.__init__(self, length=self.length, **kw)

    @classmethod
    def adapt_emulated_to_native(cls, impl, **kw):
        """Produce a MySQL native :class:`.mysql.ENUM` from plain
        :class:`.Enum`.

        """
        kw.setdefault("validate_strings", impl.validate_strings)
        kw.setdefault("values_callable", impl.values_callable)
        return cls(**kw)

    def _setup_for_values(self, values, objects, kw):
        values, length = self._init_values(values, kw)
        return super(ENUM, self)._setup_for_values(values, objects, kw)

    def _object_value_for_elem(self, elem):
        # mysql sends back a blank string for any value that
        # was persisted that was not in the enums; that is, it does no
        # validation on the incoming data, it "truncates" it to be
        # the blank string.  Return it straight.
        if elem == "":
            return elem
        else:
            return super(ENUM, self)._object_value_for_elem(elem)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[ENUM, _StringType, sqltypes.Enum])


class SET(_EnumeratedValues):
    """MySQL SET type."""

    __visit_name__ = 'SET'

    def __init__(self, *values, **kw):
        """Construct a SET.

        E.g.::

          Column('myset', SET("foo", "bar", "baz"))


        The list of potential values is required in the case that this
        set will be used to generate DDL for a table, or if the
        :paramref:`.SET.retrieve_as_bitwise` flag is set to True.

        :param values: The range of valid values for this SET.

        :param convert_unicode: Same flag as that of
         :paramref:`.String.convert_unicode`.

        :param collation: same as that of :paramref:`.String.collation`

        :param charset: same as that of :paramref:`.VARCHAR.charset`.

        :param ascii: same as that of :paramref:`.VARCHAR.ascii`.

        :param unicode: same as that of :paramref:`.VARCHAR.unicode`.

        :param binary: same as that of :paramref:`.VARCHAR.binary`.

        :param quoting: Defaults to 'auto': automatically determine set value
          quoting.  If all values are surrounded by the same quoting
          character, then use 'quoted' mode.  Otherwise, use 'unquoted' mode.

          'quoted': values in enums are already quoted, they will be used
          directly when generating the schema - this usage is deprecated.

          'unquoted': values in enums are not quoted, they will be escaped and
          surrounded by single quotes when generating the schema.

          Previous versions of this type always required manually quoted
          values to be supplied; future versions will always quote the string
          literals for you.  This is a transitional option.

          .. versionadded:: 0.9.0

        :param retrieve_as_bitwise: if True, the data for the set type will be
          persisted and selected using an integer value, where a set is
          coerced into a bitwise mask for persistence.  MySQL allows this
          mode which has the advantage of being able to store values
          unambiguously, such as the blank string ``''``.  The datatype will
          appear as the expression ``col + 0`` in a SELECT statement, so that
          the value is coerced into an integer value in result sets.
          This flag is required if one wishes
          to persist a set that can store the blank string ``''`` as a value.

          .. warning::

            When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
            essential that the list of set values is expressed in the
            **exact same order** as exists on the MySQL database.

          .. versionadded:: 1.0.0


        """
        self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False)
        values, length = self._init_values(values, kw)
        self.values = tuple(values)
        if not self.retrieve_as_bitwise and '' in values:
            raise exc.ArgumentError(
                "Can't use the blank value '' in a SET without "
                "setting retrieve_as_bitwise=True")
        if self.retrieve_as_bitwise:
            self._bitmap = dict(
                (value, 2 ** idx)
                for idx, value in enumerate(self.values)
            )
            self._bitmap.update(
                (2 ** idx, value)
                for idx, value in enumerate(self.values)
            )
        kw.setdefault('length', length)
        super(SET, self).__init__(**kw)

    def column_expression(self, colexpr):
        if self.retrieve_as_bitwise:
            return sql.type_coerce(
                sql.type_coerce(colexpr, sqltypes.Integer) + 0,
                self
            )
        else:
            return colexpr

    def result_processor(self, dialect, coltype):
        if self.retrieve_as_bitwise:
            def process(value):
                if value is not None:
                    value = int(value)

                    return set(
                        util.map_bits(self._bitmap.__getitem__, value)
                    )
                else:
                    return None
        else:
            super_convert = super(SET, self).result_processor(dialect, coltype)

            def process(value):
                if isinstance(value, util.string_types):
                    # MySQLdb returns a string, let's parse
                    if super_convert:
                        value = super_convert(value)
                    return set(re.findall(r'[^,]+', value))
                else:
                    # mysql-connector-python does a naive
                    # split(",") which throws in an empty string
                    if value is not None:
                        value.discard('')
                    return value
        return process

    def bind_processor(self, dialect):
        super_convert = super(SET, self).bind_processor(dialect)
        if self.retrieve_as_bitwise:
            def process(value):
                if value is None:
                    return None
                elif isinstance(value, util.int_types + util.string_types):
                    if super_convert:
                        return super_convert(value)
                    else:
                        return value
                else:
                    int_value = 0
                    for v in value:
                        int_value |= self._bitmap[v]
                    return int_value
        else:

            def process(value):
                # accept strings and int (actually bitflag) values directly
                if value is not None and not isinstance(
                        value, util.int_types + util.string_types):
                    value = ",".join(value)

                if super_convert:
                    return super_convert(value)
                else:
                    return value
        return process

    def adapt(self, impltype, **kw):
        kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise
        return util.constructor_copy(
            self, impltype,
            *self.values,
            **kw
        )
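
# A usage sketch for the bitwise mode described above (column name and
# values assumed):
#
#     Column('flags', SET('read', 'write', 'admin',
#                         retrieve_as_bitwise=True))
#
# With retrieve_as_bitwise=True, a value like {'read', 'admin'} round-trips
# through the integer bitmask 1 | 4 == 5 instead of a comma-joined string.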
@@ -0,0 +1,102 @@
# mysql/gaerdbms.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+gaerdbms
    :name: Google Cloud SQL
    :dbapi: rdbms
    :connectstring: mysql+gaerdbms:///<dbname>?instance=<instancename>
    :url: https://developers.google.com/appengine/docs/python/cloud-sql/\
developers-guide

This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with
minimal changes.

.. versionadded:: 0.7.8

.. deprecated:: 1.0 This dialect is **no longer necessary** for
    Google Cloud SQL; the MySQLdb dialect can be used directly.
    Cloud SQL now recommends creating connections via the
    mysql dialect using the URL format

    ``mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>``


Pooling
-------

Google App Engine connections appear to be randomly recycled,
so the dialect does not pool connections.  The :class:`.NullPool`
implementation is installed within the :class:`.Engine` by
default.

"""

import os

from .mysqldb import MySQLDialect_mysqldb
from ...pool import NullPool
import re
from sqlalchemy.util import warn_deprecated


def _is_dev_environment():
    return os.environ.get('SERVER_SOFTWARE', '').startswith('Development/')


class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):

    @classmethod
    def dbapi(cls):

        warn_deprecated(
            "Google Cloud SQL now recommends creating connections via the "
            "MySQLdb dialect directly, using the URL format "
            "mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/"
            "<projectid>:<instancename>"
        )

        # from django:
        # http://code.google.com/p/googleappengine/source/
        #     browse/trunk/python/google/storage/speckle/
        #     python/django/backend/base.py#118
        # see also [ticket:2649]
        # see also http://stackoverflow.com/q/14224679/34549
        from google.appengine.api import apiproxy_stub_map

        if _is_dev_environment():
            from google.appengine.api import rdbms_mysqldb
            return rdbms_mysqldb
        elif apiproxy_stub_map.apiproxy.GetStub('rdbms'):
            from google.storage.speckle.python.api import rdbms_apiproxy
            return rdbms_apiproxy
        else:
            from google.storage.speckle.python.api import rdbms_googleapi
            return rdbms_googleapi

    @classmethod
    def get_pool_class(cls, url):
        # Cloud SQL connections die at any moment
        return NullPool

    def create_connect_args(self, url):
        opts = url.translate_connect_args()
        if not _is_dev_environment():
            # 'dsn' and 'instance' are because we are skipping
            # the traditional google.api.rdbms wrapper
            opts['dsn'] = ''
            opts['instance'] = url.query['instance']
        return [], opts

    def _extract_error_code(self, exception):
        match = re.compile(r"^(\d+)L?:|^\((\d+)L?,").match(str(exception))
        # The rdbms api will wrap then re-raise some types of errors
        # making this regex return no matches.
        code = match.group(1) or match.group(2) if match else None
        if code:
            return int(code)

dialect = MySQLDialect_gaerdbms
@@ -0,0 +1,79 @@
# mysql/json.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from __future__ import absolute_import

import json

from ...sql import elements
from ... import types as sqltypes
from ... import util


class JSON(sqltypes.JSON):
    """MySQL JSON type.

    MySQL supports JSON as of version 5.7.  Note that MariaDB does **not**
    support JSON at the time of this writing.

    The :class:`.mysql.JSON` type supports persistence of JSON values
    as well as the core index operations provided by the
    :class:`.types.JSON` datatype, by adapting the operations to render
    the ``JSON_EXTRACT`` function at the database level.

    .. versionadded:: 1.1

    """

    pass


class _FormatTypeMixin(object):
    def _format_value(self, value):
        raise NotImplementedError()

    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process


class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):

    def _format_value(self, value):
        if isinstance(value, int):
            value = "$[%s]" % value
        else:
            value = '$."%s"' % value
        return value


class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
    def _format_value(self, value):
        return "$%s" % (
            "".join([
                "[%s]" % elem if isinstance(elem, int)
                else '."%s"' % elem for elem in value
            ])
        )
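
# Worked example of the path formatting above (input assumed):
#   ["key1", 5, "key2"]  ->  '$."key1"[5]."key2"'
# which is the JSON path string ultimately rendered into MySQL's
# JSON_EXTRACT() by the dialect.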
@@ -0,0 +1,198 @@
# mysql/mysqlconnector.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
.. dialect:: mysql+mysqlconnector
|
||||
:name: MySQL Connector/Python
|
||||
:dbapi: myconnpy
|
||||
:connectstring: mysql+mysqlconnector://<user>:<password>@\
|
||||
<host>[:<port>]/<dbname>
|
||||
:url: http://dev.mysql.com/downloads/connector/python/
|
||||
|
||||
|
||||
Unicode
|
||||
-------
|
||||
|
||||
Please see :ref:`mysql_unicode` for current recommendations on unicode
|
||||
handling.
|
||||
|
||||
"""
from .base import (MySQLDialect, MySQLExecutionContext,
                   MySQLCompiler, MySQLIdentifierPreparer,
                   BIT)

from ... import util
import re


class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):

    def get_lastrowid(self):
        return self.cursor.lastrowid


class MySQLCompiler_mysqlconnector(MySQLCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        if self.dialect._mysqlconnector_double_percents:
            return self.process(binary.left, **kw) + " %% " + \
                self.process(binary.right, **kw)
        else:
            return self.process(binary.left, **kw) + " % " + \
                self.process(binary.right, **kw)

    def post_process_text(self, text):
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text

    def escape_literal_column(self, text):
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text


class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        if self.dialect._mysqlconnector_double_percents:
            return value.replace("%", "%%")
        else:
            return value


class _myconnpyBIT(BIT):
    def result_processor(self, dialect, coltype):
        """MySQL Connector/Python already converts MySQL BIT values,
        so no result processor is needed."""

        return None


class MySQLDialect_mysqlconnector(MySQLDialect):
    driver = 'mysqlconnector'

    supports_unicode_binds = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqlconnector
    statement_compiler = MySQLCompiler_mysqlconnector

    preparer = MySQLIdentifierPreparer_mysqlconnector

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _myconnpyBIT,
        }
    )

    @util.memoized_property
    def supports_unicode_statements(self):
        return util.py3k or self._mysqlconnector_version_info > (2, 0)

    @classmethod
    def dbapi(cls):
        from mysql import connector
        return connector

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')

        opts.update(url.query)

        util.coerce_kw_type(opts, 'allow_local_infile', bool)
        util.coerce_kw_type(opts, 'autocommit', bool)
        util.coerce_kw_type(opts, 'buffered', bool)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connection_timeout', int)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'consume_results', bool)
        util.coerce_kw_type(opts, 'force_ipv6', bool)
        util.coerce_kw_type(opts, 'get_warnings', bool)
        util.coerce_kw_type(opts, 'pool_reset_session', bool)
        util.coerce_kw_type(opts, 'pool_size', int)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)
        util.coerce_kw_type(opts, 'raw', bool)
        util.coerce_kw_type(opts, 'ssl_verify_cert', bool)
        util.coerce_kw_type(opts, 'use_pure', bool)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # unfortunately, MySQL/connector python refuses to release a
        # cursor without reading fully, so non-buffered isn't an option
        opts.setdefault('buffered', True)

        # FOUND_ROWS must be set in ClientFlag to enable
        # supports_sane_rowcount.
        if self.dbapi is not None:
            try:
                from mysql.connector.constants import ClientFlag
                client_flags = opts.get(
                    'client_flags', ClientFlag.get_default())
                client_flags |= ClientFlag.FOUND_ROWS
                opts['client_flags'] = client_flags
            except Exception:
                pass
        return [[], opts]
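
    # Illustrative sketch (added; not part of the original source): the
    # coercions above exist because everything in a URL query string arrives
    # as a string.  Assuming the behavior shown, a URL such as
    #
    #     mysql+mysqlconnector://scott:tiger@localhost/test?buffered=true&pool_size=5
    #
    # would yield opts['buffered'] is True (a bool) and opts['pool_size'] == 5
    # (an int) once create_connect_args() has run.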
    @util.memoized_property
    def _mysqlconnector_version_info(self):
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                return tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    @util.memoized_property
    def _mysqlconnector_double_percents(self):
        return not util.py3k and self._mysqlconnector_version_info < (2, 0)

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        errnos = (2006, 2013, 2014, 2045, 2055, 2048)
        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
        if isinstance(e, exceptions):
            return e.errno in errnos or \
                "MySQL Connection not available." in str(e)
        else:
            return False

    def _compat_fetchall(self, rp, charset=None):
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        return rp.fetchone()

    _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
                             'READ COMMITTED', 'REPEATABLE READ',
                             'AUTOCOMMIT'])

    def _set_isolation_level(self, connection, level):
        if level == 'AUTOCOMMIT':
            connection.autocommit = True
        else:
            connection.autocommit = False
            super(MySQLDialect_mysqlconnector, self)._set_isolation_level(
                connection, level)


dialect = MySQLDialect_mysqlconnector
@@ -0,0 +1,209 @@
# mysql/mysqldb.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+mysqldb
    :name: MySQL-Python
    :dbapi: mysqldb
    :connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
    :url: http://sourceforge.net/projects/mysql-python

.. _mysqldb_unicode:

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

Py3K Support
------------

Currently, MySQLdb only runs on Python 2 and development has been stopped.
`mysqlclient`_ is a fork of MySQLdb that provides Python 3 support as well
as some bugfixes.

.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python

Using MySQLdb with Google Cloud SQL
-----------------------------------

Google Cloud SQL now recommends use of the MySQLdb dialect.  Connect
using a URL like the following::

    mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>

Server Side Cursors
-------------------

The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.

"""
from .base import (MySQLDialect, MySQLExecutionContext,
                   MySQLCompiler, MySQLIdentifierPreparer)
from .base import TEXT
from ... import sql
from ... import util
import re


class MySQLExecutionContext_mysqldb(MySQLExecutionContext):

    @property
    def rowcount(self):
        if hasattr(self, '_rowcount'):
            return self._rowcount
        else:
            return self.cursor.rowcount


class MySQLCompiler_mysqldb(MySQLCompiler):
    pass


class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):
    pass


class MySQLDialect_mysqldb(MySQLDialect):
    driver = 'mysqldb'
    supports_unicode_statements = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqldb
    statement_compiler = MySQLCompiler_mysqldb
    preparer = MySQLIdentifierPreparer_mysqldb

    def __init__(self, server_side_cursors=False, **kwargs):
        super(MySQLDialect_mysqldb, self).__init__(**kwargs)
        self.server_side_cursors = server_side_cursors

    @util.langhelpers.memoized_property
    def supports_server_side_cursors(self):
        try:
            cursors = __import__('MySQLdb.cursors').cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def dbapi(cls):
        return __import__('MySQLdb')

    def do_executemany(self, cursor, statement, parameters, context=None):
        rowcount = cursor.executemany(statement, parameters)
        if context is not None:
            context._rowcount = rowcount

    def _check_unicode_returns(self, connection):
        # work around issue fixed in
        # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
        # specific issue w/ the utf8_bin collation and unicode returns

        has_utf8_bin = self.server_version_info > (5, ) and \
            connection.scalar(
                "show collation where %s = 'utf8' and %s = 'utf8_bin'"
                % (
                    self.identifier_preparer.quote("Charset"),
                    self.identifier_preparer.quote("Collation")
                ))
        if has_utf8_bin:
            additional_tests = [
                sql.collate(sql.cast(
                    sql.literal_column(
                        "'test collated returns'"),
                    TEXT(charset='utf8')), "utf8_bin")
            ]
        else:
            additional_tests = []
        return super(MySQLDialect_mysqldb, self)._check_unicode_returns(
            connection, additional_tests)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'read_timeout', int)
        util.coerce_kw_type(opts, 'client_flag', int)
        util.coerce_kw_type(opts, 'local_infile', int)
        # Note: using either of the below will cause all strings to be
        # returned as Unicode, both in raw SQL operations and with column
        # types like String and MSString.
        util.coerce_kw_type(opts, 'use_unicode', bool)
        util.coerce_kw_type(opts, 'charset', str)

        # Rich values 'cursorclass' and 'conv' are not supported via
        # query string.

        ssl = {}
        keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
        for key in keys:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get('client_flag', 0)
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + '.constants.CLIENT'
                ).constants.CLIENT
                client_flag |= CLIENT_FLAGS.FOUND_ROWS
            except (AttributeError, ImportError):
                self.supports_sane_rowcount = False
            opts['client_flag'] = client_flag
        return [[], opts]

    def _extract_error_code(self, exception):
        return exception.args[0]

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        try:
            # note: the SQL here would be
            # "SHOW VARIABLES LIKE 'character_set%%'"
            cset_name = connection.connection.character_set_name
        except AttributeError:
            util.warn(
                "No 'character_set_name' can be detected with "
                "this MySQL-Python version; "
                "please upgrade to a recent version of MySQL-Python.  "
                "Assuming latin1.")
            return 'latin1'
        else:
            return cset_name()

    _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
                             'READ COMMITTED', 'REPEATABLE READ',
                             'AUTOCOMMIT'])

    def _set_isolation_level(self, connection, level):
        if level == 'AUTOCOMMIT':
            connection.autocommit(True)
        else:
            connection.autocommit(False)
            super(MySQLDialect_mysqldb, self)._set_isolation_level(connection,
                                                                   level)


dialect = MySQLDialect_mysqldb
@@ -0,0 +1,243 @@
# mysql/oursql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+oursql
    :name: OurSQL
    :dbapi: oursql
    :connectstring: mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
    :url: http://packages.python.org/oursql/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

"""

import re

from .base import (BIT, MySQLDialect, MySQLExecutionContext)
from ... import types as sqltypes, util


class _oursqlBIT(BIT):
    def result_processor(self, dialect, coltype):
        """oursql already converts MySQL BIT values, so no result
        processor is needed."""

        return None


class MySQLExecutionContext_oursql(MySQLExecutionContext):

    @property
    def plain_query(self):
        return self.execution_options.get('_oursql_plain_query', False)


class MySQLDialect_oursql(MySQLDialect):
    driver = 'oursql'

    if util.py2k:
        supports_unicode_binds = True
        supports_unicode_statements = True

    supports_native_decimal = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    execution_ctx_cls = MySQLExecutionContext_oursql

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _oursqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        return __import__('oursql')

    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of
        *cursor.execute(statement, parameters)*."""

        if context and context.plain_query:
            cursor.execute(statement, plain_query=True)
        else:
            cursor.execute(statement, parameters)

    def do_begin(self, connection):
        connection.cursor().execute('BEGIN', plain_query=True)

    def _xa_query(self, connection, query, xid):
        if util.py2k:
            arg = connection.connection._escape_string(xid)
        else:
            charset = self._connection_charset
            arg = connection.connection._escape_string(
                xid.encode(charset)).decode(charset)
        arg = "'%s'" % arg
        connection.execution_options(
            _oursql_plain_query=True).execute(query % arg)

    # Because mysql is bad, these methods have to be
    # reimplemented to use _PlainQuery. Basically, some queries
    # refuse to return any data if they're run through
    # the parameterized query API, or refuse to be parameterized
    # in the first place.
    def do_begin_twophase(self, connection, xid):
        self._xa_query(connection, 'XA BEGIN %s', xid)

    def do_prepare_twophase(self, connection, xid):
        self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA PREPARE %s', xid)

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        if not is_prepared:
            self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA ROLLBACK %s', xid)

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        self._xa_query(connection, 'XA COMMIT %s', xid)
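
    # Illustrative sketch (added; not part of the original source): the XA
    # methods above back SQLAlchemy's generic two-phase API.  Assuming an
    # engine bound to this dialect and an existing ``some_table``, a
    # two-phase transaction would look like:
    #
    #     conn = engine.connect()
    #     xa = conn.begin_twophase()  # emits XA BEGIN via do_begin_twophase()
    #     conn.execute(some_table.insert(), {"x": 1})
    #     xa.prepare()                # emits XA END + XA PREPARE
    #     xa.commit()                 # emits XA COMMIT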
    # Q: why didn't we need all these "plain_query" overrides earlier ?
    # am i on a newer/older version of OurSQL ?
    def has_table(self, connection, table_name, schema=None):
        return MySQLDialect.has_table(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema
        )

    def get_table_options(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_table_options(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_columns(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_columns(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_view_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_view_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema=schema,
            **kw
        )

    def get_table_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_table_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema
        )

    def get_schema_names(self, connection, **kw):
        return MySQLDialect.get_schema_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            **kw
        )

    def initialize(self, connection):
        return MySQLDialect.initialize(
            self,
            connection.execution_options(_oursql_plain_query=True)
        )

    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        return MySQLDialect._show_create_table(
            self,
            connection.contextual_connect(close_with_result=True).
            execution_options(_oursql_plain_query=True),
            table, charset, full_name
        )

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.ProgrammingError):
            return e.errno is None and 'cursor' not in e.args[1] \
                and e.args[1].endswith('closed')
        else:
            return e.errno in (2006, 2013, 2014, 2045, 2055)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        util.coerce_kw_type(opts, 'default_charset', bool)
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        opts.setdefault('found_rows', True)

        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                    'ssl_capath', 'ssl_cipher']:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        return [[], opts]

    def _extract_error_code(self, exception):
        return exception.errno

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        return connection.connection.charset

    def _compat_fetchall(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchone()

    def _compat_first(self, rp, charset=None):
        return rp.first()


dialect = MySQLDialect_oursql
@@ -0,0 +1,70 @@
# mysql/pymysql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+pymysql
    :name: PyMySQL
    :dbapi: pymysql
    :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
    :url: http://www.pymysql.org/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

MySQL-Python Compatibility
--------------------------

The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
and targets 100% compatibility.  Most behavioral notes for MySQL-python apply
to the pymysql driver as well.

"""
from .mysqldb import MySQLDialect_mysqldb
from ...util import langhelpers, py3k


class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    driver = 'pymysql'

    description_encoding = None

    # generally, these two values should be both True
    # or both False.  PyMySQL unicode tests pass all the way back
    # to 0.4 either way.  See [ticket:3337]
    supports_unicode_statements = True
    supports_unicode_binds = True

    def __init__(self, server_side_cursors=False, **kwargs):
        super(MySQLDialect_pymysql, self).__init__(**kwargs)
        self.server_side_cursors = server_side_cursors

    @langhelpers.memoized_property
    def supports_server_side_cursors(self):
        try:
            cursors = __import__('pymysql.cursors').cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def dbapi(cls):
        return __import__('pymysql')

    if py3k:
        def _extract_error_code(self, exception):
            if isinstance(exception.args[0], Exception):
                exception = exception.args[0]
            return exception.args[0]


dialect = MySQLDialect_pymysql
@@ -0,0 +1,79 @@
# mysql/pyodbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""


.. dialect:: mysql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
    :url: http://pypi.python.org/pypi/pyodbc/

.. note:: The PyODBC for MySQL dialect is not well supported, and
   is subject to unresolved character encoding issues
   which exist within the current ODBC drivers available.
   (see http://code.google.com/p/pyodbc/issues/detail?id=25).
   Other dialects for MySQL are recommended.

"""
from .base import MySQLDialect, MySQLExecutionContext
from ...connectors.pyodbc import PyODBCConnector
from ... import util
import re


class MySQLExecutionContext_pyodbc(MySQLExecutionContext):

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT LAST_INSERT_ID()")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid


class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    supports_unicode_statements = False
    execution_ctx_cls = MySQLExecutionContext_pyodbc

    pyodbc_driver_name = "MySQL"

    def __init__(self, **kw):
        # deal with http://code.google.com/p/pyodbc/issues/detail?id=25
        kw.setdefault('convert_unicode', True)
        super(MySQLDialect_pyodbc, self).__init__(**kw)

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
        opts = {row[0]: row[1] for row in self._compat_fetchall(rs)}
        for key in ('character_set_connection', 'character_set'):
            if opts.get(key, None):
                return opts[key]

        util.warn("Could not detect the connection character set.  "
                  "Assuming latin1.")
        return 'latin1'

    def _extract_error_code(self, exception):
        # guard against no match; the original indexed m.group(1) directly,
        # which would raise AttributeError when the pattern is absent
        m = re.compile(r"\((\d+)\)").search(str(exception.args))
        if m:
            return int(m.group(1))
        else:
            return None


dialect = MySQLDialect_pyodbc
@@ -0,0 +1,478 @@
# mysql/reflection.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re
from ... import log, util
from ... import types as sqltypes
from .enumerated import _EnumeratedValues, SET
from .types import DATETIME, TIME, TIMESTAMP


class ReflectedState(object):
    """Stores raw information about a SHOW CREATE TABLE statement."""

    def __init__(self):
        self.columns = []
        self.table_options = {}
        self.table_name = None
        self.keys = []
        self.fk_constraints = []
        self.ck_constraints = []


@log.class_logger
class MySQLTableDefinitionParser(object):
    """Parses the results of a SHOW CREATE TABLE statement."""

    def __init__(self, dialect, preparer):
        self.dialect = dialect
        self.preparer = preparer
        self._prep_regexes()

    def parse(self, show_create, charset):
        state = ReflectedState()
        state.charset = charset
        for line in re.split(r'\r?\n', show_create):
            if line.startswith('  ' + self.preparer.initial_quote):
                self._parse_column(line, state)
            # a regular table options line
            elif line.startswith(') '):
                self._parse_table_options(line, state)
            # an ANSI-mode table options line
            elif line == ')':
                pass
            elif line.startswith('CREATE '):
                self._parse_table_name(line, state)
            # Not present in real reflection, but may be if
            # loading from a file.
            elif not line:
                pass
            else:
                type_, spec = self._parse_constraints(line)
                if type_ is None:
                    util.warn("Unknown schema content: %r" % line)
                elif type_ == 'key':
                    state.keys.append(spec)
                elif type_ == 'fk_constraint':
                    state.fk_constraints.append(spec)
                elif type_ == 'ck_constraint':
                    state.ck_constraints.append(spec)
                else:
                    pass
        return state

    def _parse_constraints(self, line):
        """Parse a KEY or CONSTRAINT line.

        :param line: A line of SHOW CREATE TABLE output
        """

        # KEY
        m = self._re_key.match(line)
        if m:
            spec = m.groupdict()
            # convert columns into name, length pairs
            spec['columns'] = self._parse_keyexprs(spec['columns'])
            return 'key', spec

        # FOREIGN KEY CONSTRAINT
        m = self._re_fk_constraint.match(line)
        if m:
            spec = m.groupdict()
            spec['table'] = \
                self.preparer.unformat_identifiers(spec['table'])
            spec['local'] = [c[0]
                             for c in self._parse_keyexprs(spec['local'])]
            spec['foreign'] = [c[0]
                               for c in self._parse_keyexprs(spec['foreign'])]
            return 'fk_constraint', spec

        # CHECK constraint
        m = self._re_ck_constraint.match(line)
        if m:
            spec = m.groupdict()
            return 'ck_constraint', spec

        # PARTITION and SUBPARTITION
        m = self._re_partition.match(line)
        if m:
            # Punt!
            return 'partition', line

        # No match.
        return (None, line)

    def _parse_table_name(self, line, state):
        """Extract the table name.

        :param line: The first line of SHOW CREATE TABLE
        """

        regex, cleanup = self._pr_name
        m = regex.match(line)
        if m:
            state.table_name = cleanup(m.group('name'))

    def _parse_table_options(self, line, state):
        """Build a dictionary of all reflected table-level options.

        :param line: The final line of SHOW CREATE TABLE output.
        """

        options = {}

        if not line or line == ')':
            pass

        else:
            rest_of_line = line[:]
            for regex, cleanup in self._pr_options:
                m = regex.search(rest_of_line)
                if not m:
                    continue
                directive, value = m.group('directive'), m.group('val')
                if cleanup:
                    value = cleanup(value)
                options[directive.lower()] = value
                rest_of_line = regex.sub('', rest_of_line)

        for nope in ('auto_increment', 'data directory', 'index directory'):
            options.pop(nope, None)

        for opt, val in options.items():
            state.table_options['%s_%s' % (self.dialect.name, opt)] = val

    def _parse_column(self, line, state):
        """Extract column details.

        Falls back to a 'minimal support' variant if full parse fails.

        :param line: Any column-bearing line from SHOW CREATE TABLE
        """

        spec = None
        m = self._re_column.match(line)
        if m:
            spec = m.groupdict()
            spec['full'] = True
        else:
            m = self._re_column_loose.match(line)
            if m:
                spec = m.groupdict()
                spec['full'] = False
        if not spec:
            util.warn("Unknown column definition %r" % line)
            return
        if not spec['full']:
            util.warn("Incomplete reflection of column definition %r" % line)

        name, type_, args = spec['name'], spec['coltype'], spec['arg']

        try:
            col_type = self.dialect.ischema_names[type_]
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (type_, name))
            col_type = sqltypes.NullType

        # Column type positional arguments eg. varchar(32)
        if args is None or args == '':
            type_args = []
        elif args[0] == "'" and args[-1] == "'":
            type_args = self._re_csv_str.findall(args)
        else:
            type_args = [int(v) for v in self._re_csv_int.findall(args)]

        # Column type keyword options
        type_kw = {}

        if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
            if type_args:
                type_kw['fsp'] = type_args.pop(0)

        for kw in ('unsigned', 'zerofill'):
            if spec.get(kw, False):
                type_kw[kw] = True
        for kw in ('charset', 'collate'):
            if spec.get(kw, False):
                type_kw[kw] = spec[kw]
        if issubclass(col_type, _EnumeratedValues):
            type_args = _EnumeratedValues._strip_values(type_args)

            if issubclass(col_type, SET) and '' in type_args:
                type_kw['retrieve_as_bitwise'] = True

        type_instance = col_type(*type_args, **type_kw)

        col_kw = {}

        # NOT NULL
        col_kw['nullable'] = True
        # this can be "NULL" in the case of TIMESTAMP
        if spec.get('notnull', False) == 'NOT NULL':
            col_kw['nullable'] = False

        # AUTO_INCREMENT
        if spec.get('autoincr', False):
            col_kw['autoincrement'] = True
        elif issubclass(col_type, sqltypes.Integer):
            col_kw['autoincrement'] = False

        # DEFAULT
        default = spec.get('default', None)

        if default == 'NULL':
            # eliminates the need to deal with this later.
            default = None

        comment = spec.get('comment', None)

        if comment is not None:
            comment = comment.replace("\\\\", "\\").replace("''", "'")

        col_d = dict(name=name, type=type_instance, default=default,
                     comment=comment)
        col_d.update(col_kw)
        state.columns.append(col_d)

    def _describe_to_create(self, table_name, columns):
        """Re-format DESCRIBE output as a SHOW CREATE TABLE string.

        DESCRIBE is a much simpler reflection and is sufficient for
        reflecting views for runtime use.  This method formats DDL
        for columns only; keys are omitted.

        :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
          SHOW FULL COLUMNS FROM rows must be rearranged for use with
          this function.
        """

        buffer = []
        for row in columns:
            (name, col_type, nullable, default, extra) = \
                [row[i] for i in (0, 1, 2, 4, 5)]

            line = [' ']
            line.append(self.preparer.quote_identifier(name))
            line.append(col_type)
            if not nullable:
                line.append('NOT NULL')
            if default:
                if 'auto_increment' in default:
                    pass
                elif (col_type.startswith('timestamp') and
                        default.startswith('C')):
                    line.append('DEFAULT')
                    line.append(default)
                elif default == 'NULL':
                    line.append('DEFAULT')
                    line.append(default)
                else:
                    line.append('DEFAULT')
                    line.append("'%s'" % default.replace("'", "''"))
            if extra:
                line.append(extra)

            buffer.append(' '.join(line))

        return ''.join([('CREATE TABLE %s (\n' %
                         self.preparer.quote_identifier(table_name)),
                        ',\n'.join(buffer),
                        '\n) '])

    def _parse_keyexprs(self, identifiers):
        """Unpack '"col"(2),"col" ASC'-ish strings into components."""

        return self._re_keyexprs.findall(identifiers)
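
    # Illustrative sketch (added; not part of the original source): with the
    # key-expression regex prepared below, a call such as
    #
    #     self._parse_keyexprs('`col`,`col2`(32),`col3`(15) DESC')
    #
    # would return [('col', ''), ('col2', '32'), ('col3', '15')] -- name and
    # optional prefix-length pairs, with ASC/DESC modifiers dropped.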
    def _prep_regexes(self):
        """Pre-compile regular expressions."""

        self._re_columns = []
        self._pr_options = []

        _final = self.preparer.final_quote

        quotes = dict(zip(('iq', 'fq', 'esc_fq'),
                          [re.escape(s) for s in
                           (self.preparer.initial_quote,
                            _final,
                            self.preparer._escape_identifier(_final))]))

        self._pr_name = _pr_compile(
            r'^CREATE (?:\w+ +)?TABLE +'
            r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes,
            self.preparer._unescape_identifier)

        # `col`,`col2`(32),`col3`(15) DESC
        #
        # Note: ASC and DESC aren't reflected, so we'll punt...
        self._re_keyexprs = _re_compile(
            r'(?:'
            r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)'
            r'(?:\((\d+)\))?(?=\,|$))+' % quotes)

        # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
        self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27')

        # 123 or 123,456
        self._re_csv_int = _re_compile(r'\d+')

        # `colname` <type> [type opts]
        #  (NOT NULL | NULL)
        #   DEFAULT ('value' | CURRENT_TIMESTAMP...)
        #   COMMENT 'comment'
        #  COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
        #  STORAGE (DISK|MEMORY)
        self._re_column = _re_compile(
            r"  "
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
            r"(?P<coltype>\w+)"
            r"(?:\((?P<arg>(?:\d+|\d+,\d+|"
            r"(?:'(?:''|[^'])*',?)+))\))?"
            r"(?: +(?P<unsigned>UNSIGNED))?"
            r"(?: +(?P<zerofill>ZEROFILL))?"
            r"(?: +CHARACTER SET +(?P<charset>[\w_]+))?"
            r"(?: +COLLATE +(?P<collate>[\w_]+))?"
            r"(?: +(?P<notnull>(?:NOT )?NULL))?"
            r"(?: +DEFAULT +(?P<default>"
            r"(?:NULL|'(?:''|[^'])*'|[\w\(\)]+"
            r"(?: +ON UPDATE [\w\(\)]+)?)"
            r"))?"
            r"(?: +(?P<autoincr>AUTO_INCREMENT))?"
            r"(?: +COMMENT +'(?P<comment>(?:''|[^'])*)')?"
            r"(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?"
            r"(?: +STORAGE +(?P<storage>\w+))?"
            r"(?: +(?P<extra>.*))?"
            r",?$"
            % quotes
        )

        # Fallback, try to parse as little as possible
        self._re_column_loose = _re_compile(
            r'  '
            r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
            r'(?P<coltype>\w+)'
            r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
            r'.*?(?P<notnull>(?:NOT )NULL)?'
            % quotes
        )

        # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
        # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
        # KEY_BLOCK_SIZE size | WITH PARSER name
        self._re_key = _re_compile(
            r'  '
            r'(?:(?P<type>\S+) )?KEY'
            r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?'
            r'(?: +USING +(?P<using_pre>\S+))?'
            r' +\((?P<columns>.+?)\)'
            r'(?: +USING +(?P<using_post>\S+))?'
            r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?'
            r'(?: +WITH PARSER +(?P<parser>\S+))?'
            r'(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?'
            r',?$'
            % quotes
        )

        # CONSTRAINT `name` FOREIGN KEY (`local_col`)
        # REFERENCES `remote` (`remote_col`)
        # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
        # ON DELETE CASCADE ON UPDATE RESTRICT
        #
        # unique constraints come back as KEYs
        kw = quotes.copy()
        kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
        self._re_fk_constraint = _re_compile(
            r'  '
            r'CONSTRAINT +'
            r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
            r'FOREIGN KEY +'
            r'\((?P<local>[^\)]+?)\) REFERENCES +'
            r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s'
            r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
            r'\((?P<foreign>[^\)]+?)\)'
            r'(?: +(?P<match>MATCH \w+))?'
            r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
            r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
            % kw
        )

        # CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)'
        # testing on MariaDB 10.2 shows that the CHECK constraint
        # is returned on a line by itself, so to match without worrying
        # about parenthesis in the expression we go to the end of the line
        self._re_ck_constraint = _re_compile(
            r'  '
            r'CONSTRAINT +'
            r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
            r'CHECK +'
            r'\((?P<sqltext>.+)\),?'
            % kw
        )

        # PARTITION
        #
        # punt!
        self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)')

        # Table-level options (COLLATE, ENGINE, etc.)
        # Do the string options first, since they have quoted
        # strings we need to get rid of.
        for option in _options_of_type_string:
            self._add_option_string(option)

        for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT',
                       'AVG_ROW_LENGTH', 'CHARACTER SET',
                       'DEFAULT CHARSET', 'CHECKSUM',
                       'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD',
                       'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT',
                       'KEY_BLOCK_SIZE'):
            self._add_option_word(option)

        self._add_option_regex('UNION', r'\([^\)]+\)')
        self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
        self._add_option_regex(
            'RAID_TYPE',
            r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')

    _optional_equals = r'(?:\s*(?:=\s*)|\s+)'

    def _add_option_string(self, directive):
        regex = (r'(?P<directive>%s)%s'
                 r"'(?P<val>(?:[^']|'')*?)'(?!')" %
                 (re.escape(directive), self._optional_equals))
        self._pr_options.append(_pr_compile(
            regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")
        ))

    def _add_option_word(self, directive):
        regex = (r'(?P<directive>%s)%s'
                 r'(?P<val>\w+)' %
                 (re.escape(directive), self._optional_equals))
        self._pr_options.append(_pr_compile(regex))

    def _add_option_regex(self, directive, regex):
        regex = (r'(?P<directive>%s)%s'
                 r'(?P<val>%s)' %
                 (re.escape(directive), self._optional_equals, regex))
        self._pr_options.append(_pr_compile(regex))


_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
                           'PASSWORD', 'CONNECTION')


def _pr_compile(regex, cleanup=None):
    """Prepare a 2-tuple of compiled regex and callable."""

    return (_re_compile(regex), cleanup)


def _re_compile(regex):
    """Compile a string to regex, I and UNICODE."""

    return re.compile(regex, re.I | re.UNICODE)
@@ -0,0 +1,766 @@
# mysql/types.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import datetime
from ... import exc, util
from ... import types as sqltypes


class _NumericType(object):
    """Base for MySQL numeric types.

    This is the base both for NUMERIC as well as INTEGER, hence
    it's a mixin.

    """

    def __init__(self, unsigned=False, zerofill=False, **kw):
        self.unsigned = unsigned
        self.zerofill = zerofill
        super(_NumericType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_NumericType, sqltypes.Numeric])


class _FloatType(_NumericType, sqltypes.Float):
    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        if isinstance(self, (REAL, DOUBLE)) and \
            (
                (precision is None and scale is not None) or
                (precision is not None and scale is None)
        ):
            raise exc.ArgumentError(
                "You must specify both precision and scale or omit "
                "both altogether.")
        super(_FloatType, self).__init__(
            precision=precision, asdecimal=asdecimal, **kw)
        self.scale = scale

    def __repr__(self):
        return util.generic_repr(self, to_inspect=[_FloatType,
                                                   _NumericType,
                                                   sqltypes.Float])


class _IntegerType(_NumericType, sqltypes.Integer):
    def __init__(self, display_width=None, **kw):
        self.display_width = display_width
        super(_IntegerType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self, to_inspect=[_IntegerType,
                                                   _NumericType,
                                                   sqltypes.Integer])


class _StringType(sqltypes.String):
    """Base for MySQL string types."""

    def __init__(self, charset=None, collation=None,
                 ascii=False, binary=False, unicode=False,
                 national=False, **kw):
        self.charset = charset

        # allow collate= or collation=
        kw.setdefault('collation', kw.pop('collate', collation))

        self.ascii = ascii
        self.unicode = unicode
        self.binary = binary
        self.national = national
        super(_StringType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_StringType, sqltypes.String])


class _MatchType(sqltypes.Float, sqltypes.MatchType):
    def __init__(self, **kw):
        # TODO: float arguments?
        sqltypes.Float.__init__(self)
        sqltypes.MatchType.__init__(self)


class NUMERIC(_NumericType, sqltypes.NUMERIC):
    """MySQL NUMERIC type."""

    __visit_name__ = 'NUMERIC'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a NUMERIC.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(NUMERIC, self).__init__(precision=precision,
                                      scale=scale, asdecimal=asdecimal, **kw)


class DECIMAL(_NumericType, sqltypes.DECIMAL):
    """MySQL DECIMAL type."""

    __visit_name__ = 'DECIMAL'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DECIMAL.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(DECIMAL, self).__init__(precision=precision, scale=scale,
                                      asdecimal=asdecimal, **kw)


class DOUBLE(_FloatType):
    """MySQL DOUBLE type."""

    __visit_name__ = 'DOUBLE'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DOUBLE.

        .. note::

            The :class:`.DOUBLE` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.
            Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
            to change this scale, or ``asdecimal=False`` to return values
            directly as Python floating points.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(DOUBLE, self).__init__(precision=precision, scale=scale,
                                     asdecimal=asdecimal, **kw)


class REAL(_FloatType, sqltypes.REAL):
    """MySQL REAL type."""

    __visit_name__ = 'REAL'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a REAL.

        .. note::

            The :class:`.REAL` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.
            Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
            to change this scale, or ``asdecimal=False`` to return values
            directly as Python floating points.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(REAL, self).__init__(precision=precision, scale=scale,
                                   asdecimal=asdecimal, **kw)


class FLOAT(_FloatType, sqltypes.FLOAT):
    """MySQL FLOAT type."""

    __visit_name__ = 'FLOAT'

    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
        """Construct a FLOAT.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(FLOAT, self).__init__(precision=precision, scale=scale,
                                    asdecimal=asdecimal, **kw)

    def bind_processor(self, dialect):
        return None


class INTEGER(_IntegerType, sqltypes.INTEGER):
    """MySQL INTEGER type."""

    __visit_name__ = 'INTEGER'

    def __init__(self, display_width=None, **kw):
        """Construct an INTEGER.

        :param display_width: Optional, maximum display width for this number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(INTEGER, self).__init__(display_width=display_width, **kw)


class BIGINT(_IntegerType, sqltypes.BIGINT):
    """MySQL BIGINTEGER type."""

    __visit_name__ = 'BIGINT'

    def __init__(self, display_width=None, **kw):
        """Construct a BIGINTEGER.

        :param display_width: Optional, maximum display width for this number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(BIGINT, self).__init__(display_width=display_width, **kw)


class MEDIUMINT(_IntegerType):
    """MySQL MEDIUMINTEGER type."""

    __visit_name__ = 'MEDIUMINT'

    def __init__(self, display_width=None, **kw):
        """Construct a MEDIUMINTEGER.

        :param display_width: Optional, maximum display width for this number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(MEDIUMINT, self).__init__(display_width=display_width, **kw)


class TINYINT(_IntegerType):
    """MySQL TINYINT type."""

    __visit_name__ = 'TINYINT'

    def __init__(self, display_width=None, **kw):
        """Construct a TINYINT.

        :param display_width: Optional, maximum display width for this number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(TINYINT, self).__init__(display_width=display_width, **kw)


class SMALLINT(_IntegerType, sqltypes.SMALLINT):
    """MySQL SMALLINTEGER type."""

    __visit_name__ = 'SMALLINT'

    def __init__(self, display_width=None, **kw):
        """Construct a SMALLINTEGER.

        :param display_width: Optional, maximum display width for this number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
          returned by the underlying database API, which continue to be
          numeric.

        """
        super(SMALLINT, self).__init__(display_width=display_width, **kw)


class BIT(sqltypes.TypeEngine):
    """MySQL BIT type.

    This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
    for MyISAM, MEMORY, InnoDB and BDB.  For older versions, use a
    MSTinyInteger() type.

    """

    __visit_name__ = 'BIT'

    def __init__(self, length=None):
        """Construct a BIT.

        :param length: Optional, number of bits.

        """
        self.length = length

    def result_processor(self, dialect, coltype):
        """Convert MySQL's 64 bit, variable length binary string to a long.

        TODO: this is MySQL-db, pyodbc specific.  OurSQL and mysqlconnector
        already do this, so this logic should be moved to those dialects.

        """

        def process(value):
            if value is not None:
                v = 0
                for i in value:
                    if not isinstance(i, int):
                        i = ord(i)  # convert byte to int on Python 2
                    v = v << 8 | i
                return v
            return value
        return process
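
    # Illustrative sketch (added; not part of the original source): the
    # processor above folds each byte into an integer, so a BIT(10) value
    # returned by the driver as the byte string b'\x02\x01' becomes
    # (2 << 8) | 1 == 513.  A None value passes through unchanged.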
class TIME(sqltypes.TIME):
    """MySQL TIME type."""

    __visit_name__ = 'TIME'

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the TIME type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        .. versionadded:: 0.8 The MySQL-specific TIME
           type as well as fractional seconds support.

        """
        super(TIME, self).__init__(timezone=timezone)
        self.fsp = fsp

    def result_processor(self, dialect, coltype):
        time = datetime.time

        def process(value):
            # convert from a timedelta value
            if value is not None:
                microseconds = value.microseconds
                seconds = value.seconds
                minutes = seconds // 60
                return time(minutes // 60,
                            minutes % 60,
                            seconds - minutes * 60,
                            microsecond=microseconds)
            else:
                return None
        return process
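
    # Illustrative sketch (added; not part of the original source): MySQL
    # drivers typically return TIME columns as datetime.timedelta, so the
    # processor above rebuilds a datetime.time.  For example,
    # datetime.timedelta(hours=5, minutes=30, seconds=15) has
    # seconds == 19815, giving time(5, 30, 15).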


class TIMESTAMP(sqltypes.TIMESTAMP):
    """MySQL TIMESTAMP type.

    """

    __visit_name__ = 'TIMESTAMP'

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIMESTAMP type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the TIMESTAMP type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
           with fractional seconds support.

        """
        super(TIMESTAMP, self).__init__(timezone=timezone)
        self.fsp = fsp


class DATETIME(sqltypes.DATETIME):
    """MySQL DATETIME type.

    """

    __visit_name__ = 'DATETIME'

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL DATETIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the DATETIME type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
           with fractional seconds support.

        """
        super(DATETIME, self).__init__(timezone=timezone)
        self.fsp = fsp


class YEAR(sqltypes.TypeEngine):
    """MySQL YEAR type, for single byte storage of years 1901-2155."""

    __visit_name__ = 'YEAR'

    def __init__(self, display_width=None):
        self.display_width = display_width


class TEXT(_StringType, sqltypes.TEXT):
    """MySQL TEXT type, for text up to 2^16 characters."""

    __visit_name__ = 'TEXT'

    def __init__(self, length=None, **kw):
        """Construct a TEXT.

        :param length: Optional, if provided the server may optimize storage
          by substituting the smallest TEXT type sufficient to store
          ``length`` characters.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
          national character set.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        super(TEXT, self).__init__(length=length, **kw)


class TINYTEXT(_StringType):
    """MySQL TINYTEXT type, for text up to 2^8 characters."""

    __visit_name__ = 'TINYTEXT'

    def __init__(self, **kwargs):
        """Construct a TINYTEXT.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
          national character set.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        super(TINYTEXT, self).__init__(**kwargs)


class MEDIUMTEXT(_StringType):
    """MySQL MEDIUMTEXT type, for text up to 2^24 characters."""

    __visit_name__ = 'MEDIUMTEXT'

    def __init__(self, **kwargs):
        """Construct a MEDIUMTEXT.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
          national character set.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        super(MEDIUMTEXT, self).__init__(**kwargs)


class LONGTEXT(_StringType):
    """MySQL LONGTEXT type, for text up to 2^32 characters."""

    __visit_name__ = 'LONGTEXT'

    def __init__(self, **kwargs):
        """Construct a LONGTEXT.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
          national character set.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        super(LONGTEXT, self).__init__(**kwargs)


class VARCHAR(_StringType, sqltypes.VARCHAR):
    """MySQL VARCHAR type, for variable-length character data."""

    __visit_name__ = 'VARCHAR'

    def __init__(self, length=None, **kwargs):
        """Construct a VARCHAR.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
          national character set.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        super(VARCHAR, self).__init__(length=length, **kwargs)


class CHAR(_StringType, sqltypes.CHAR):
    """MySQL CHAR type, for fixed-length character data."""

    __visit_name__ = 'CHAR'

    def __init__(self, length=None, **kwargs):
        """Construct a CHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
          national character set.  This does not affect the type of data
          stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
          compatible with the national character set.

        """
        super(CHAR, self).__init__(length=length, **kwargs)

    @classmethod
    def _adapt_string_for_cast(self, type_):
        # copy the given string type into a CHAR
        # for the purposes of rendering a CAST expression
        type_ = sqltypes.to_instance(type_)
        if isinstance(type_, sqltypes.CHAR):
            return type_
        elif isinstance(type_, _StringType):
            return CHAR(
                length=type_.length,
                charset=type_.charset,
                collation=type_.collation,
                ascii=type_.ascii,
                binary=type_.binary,
                unicode=type_.unicode,
                national=False  # not supported in CAST
            )
        else:
            return CHAR(length=type_.length)


class NVARCHAR(_StringType, sqltypes.NVARCHAR):
    """MySQL NVARCHAR type.

    For variable-length character data in the server's configured national
    character set.
    """

    __visit_name__ = 'NVARCHAR'

    def __init__(self, length=None, **kwargs):
        """Construct an NVARCHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
          national character set.  This does not affect the type of data
          stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
          compatible with the national character set.

        """
        kwargs['national'] = True
        super(NVARCHAR, self).__init__(length=length, **kwargs)


class NCHAR(_StringType, sqltypes.NCHAR):
    """MySQL NCHAR type.

    For fixed-length character data in the server's configured national
    character set.
    """

    __visit_name__ = 'NCHAR'

    def __init__(self, length=None, **kwargs):
        """Construct an NCHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
          national character set.  This does not affect the type of data
          stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
          compatible with the national character set.

        """
        kwargs['national'] = True
        super(NCHAR, self).__init__(length=length, **kwargs)


class TINYBLOB(sqltypes._Binary):
    """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""

    __visit_name__ = 'TINYBLOB'


class MEDIUMBLOB(sqltypes._Binary):
    """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""

    __visit_name__ = 'MEDIUMBLOB'


class LONGBLOB(sqltypes._Binary):
    """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""

    __visit_name__ = 'LONGBLOB'
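
# --- Editor's note: a hedged sketch, not part of this diff. The charset and
# --- collation keyword arguments documented on the string types above render
# --- into MySQL DDL; the table/column names are illustrative only.
#
# from sqlalchemy import Table, Column, MetaData
# from sqlalchemy.dialects import mysql
# from sqlalchemy.schema import CreateTable
#
# metadata = MetaData()
# notes = Table('notes', metadata,
#               Column('body', mysql.TEXT(charset='utf8',
#                                         collation='utf8_bin')))
# print(CreateTable(notes).compile(dialect=mysql.dialect()))
# # renders roughly:
# # CREATE TABLE notes (body TEXT CHARACTER SET utf8 COLLATE utf8_bin)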

@@ -0,0 +1,117 @@
# mysql/zxjdbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+zxjdbc
    :name: zxjdbc for Jython
    :dbapi: zxjdbc
    :connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/\
<database>
    :driverurl: http://dev.mysql.com/downloads/connector/j/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

Character Sets
--------------

SQLAlchemy zxjdbc dialects pass unicode straight through to the
zxjdbc/JDBC layer. To allow multiple character sets to be sent from the
MySQL Connector/J JDBC driver, by default SQLAlchemy sets its
``characterEncoding`` connection property to ``UTF-8``. It may be
overridden via a ``create_engine`` URL parameter.

"""
import re

from ... import types as sqltypes, util
from ...connectors.zxJDBC import ZxJDBCConnector
from .base import BIT, MySQLDialect, MySQLExecutionContext


class _ZxJDBCBit(BIT):
    def result_processor(self, dialect, coltype):
        """Converts boolean or byte arrays from MySQL Connector/J to longs."""
        def process(value):
            if value is None:
                return value
            if isinstance(value, bool):
                return int(value)
            v = 0
            for i in value:
                v = v << 8 | (i & 0xff)
            value = v
            return value
        return process


class MySQLExecutionContext_zxjdbc(MySQLExecutionContext):
    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT LAST_INSERT_ID()")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid


class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
    jdbc_db_name = 'mysql'
    jdbc_driver_name = 'com.mysql.jdbc.Driver'

    execution_ctx_cls = MySQLExecutionContext_zxjdbc

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _ZxJDBCBit
        }
    )

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        # Prefer 'character_set_results' for the current connection over the
        # value in the driver. SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
        opts = {row[0]: row[1] for row in self._compat_fetchall(rs)}
        for key in ('character_set_connection', 'character_set'):
            if opts.get(key, None):
                return opts[key]

        util.warn("Could not detect the connection character set. "
                  "Assuming latin1.")
        return 'latin1'

    def _driver_kwargs(self):
        """return kw arg dict to be sent to connect()."""
        return dict(characterEncoding='UTF-8', yearIsDateType='false')

    def _extract_error_code(self, exception):
        # e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
        # [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
        m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
        c = m.group(1)
        if c:
            return int(c)

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.dbversion):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

dialect = MySQLDialect_zxjdbc
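
# --- Editor's note: a standalone sketch, not part of this diff, restating the
# --- byte-array-to-int conversion that _ZxJDBCBit.result_processor performs.
#
# def bits_to_int(value):
#     v = 0
#     for i in value:          # each byte becomes the next 8 bits
#         v = v << 8 | (i & 0xff)
#     return v
#
# assert bits_to_int(b'\x01\x00') == 256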

@@ -0,0 +1,24 @@
# oracle/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, cx_oracle, zxjdbc  # noqa

from .base import \
    VARCHAR, NVARCHAR, CHAR, DATE, NUMBER,\
    BLOB, BFILE, BINARY_FLOAT, BINARY_DOUBLE, CLOB, NCLOB, TIMESTAMP, RAW,\
    FLOAT, DOUBLE_PRECISION, LONG, INTERVAL,\
    VARCHAR2, NVARCHAR2, ROWID

base.dialect = dialect = cx_oracle.dialect

__all__ = (
    'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER',
    'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
    'FLOAT', 'DOUBLE_PRECISION', 'BINARY_DOUBLE', 'BINARY_FLOAT',
    'LONG', 'dialect', 'INTERVAL',
    'VARCHAR2', 'NVARCHAR2', 'ROWID'
)

File diff suppressed because it is too large
@@ -0,0 +1,911 @@
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: oracle+cx_oracle
    :name: cx-Oracle
    :dbapi: cx_oracle
    :connectstring: oracle+cx_oracle://user:pass@host:port/dbname\
[?key=value&key=value...]
    :url: http://cx-oracle.sourceforge.net/

Additional Connect Arguments
----------------------------

When connecting with ``dbname`` present, the host, port, and dbname tokens are
converted to a TNS name using
the cx_oracle ``makedsn()`` function.  Otherwise, the host token is taken
directly as a TNS name.

Additional arguments which may be specified either as query string arguments
on the URL, or as keyword arguments to :func:`.create_engine()` are:

* ``arraysize`` - set the cx_oracle.arraysize value on cursors, defaulted
  to 50.  This setting is significant with cx_Oracle as the contents of LOB
  objects are only readable within a "live" row (e.g. within a batch of
  50 rows).

* ``auto_convert_lobs`` - defaults to True; See :ref:`cx_oracle_lob`.

* ``coerce_to_unicode`` - see :ref:`cx_oracle_unicode` for detail.

* ``coerce_to_decimal`` - see :ref:`cx_oracle_numeric` for detail.

* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or
  alternatively an integer value.  This value is only available as a URL query
  string argument.

* ``threaded`` - enable multithreaded access to cx_oracle connections.
  Defaults to ``True``.  Note that this is the opposite default of the
  cx_Oracle DBAPI itself.

* ``service_name`` - An option to use connection string (DSN) with
  ``SERVICE_NAME`` instead of ``SID``.  It can't be passed when a ``database``
  part is given.
  E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr``
  is a valid url.  This value is only available as a URL query string argument.

  .. versionadded:: 1.0.0

.. _cx_oracle_unicode:

Unicode
-------

The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the
ability to return string results as Python unicode objects natively.

When used in Python 3, cx_Oracle returns all strings as Python unicode objects
(that is, plain ``str`` in Python 3).  In Python 2, it will return as Python
unicode those column values that are of type ``NVARCHAR`` or ``NCLOB``.  For
column values that are of type ``VARCHAR`` or other non-unicode string types,
it will return values as Python strings (e.g. bytestrings).

The cx_Oracle SQLAlchemy dialect presents several different options for the use
case of receiving ``VARCHAR`` column values as Python unicode objects under
Python 2:

* When using Core expression objects as well as the ORM, SQLAlchemy's
  unicode-decoding services are available, which are established by
  using either the :class:`.Unicode` datatype or by using the
  :class:`.String` datatype with :paramref:`.String.convert_unicode` set
  to True.

* When using raw SQL strings, typing behavior can be added for unicode
  conversion using the :func:`.text` construct::

    from sqlalchemy import text, Unicode
    result = conn.execute(
        text("select username from user").columns(username=Unicode))

* Otherwise, when using raw SQL strings sent directly to an ``.execute()``
  method without any Core typing behavior added, the flag
  ``coerce_to_unicode=True`` can be passed to :func:`.create_engine`,
  which will add an unconditional unicode processor to cx_Oracle for all
  string values::

    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_unicode=True)

The above approach will add significant latency to result-set fetches
of plain string values.

Sending String Values as Unicode or Non-Unicode
------------------------------------------------

As of SQLAlchemy 1.2.2, the cx_Oracle dialect unconditionally calls
``setinputsizes()`` for bound values that are passed as Python unicode objects.
In Python 3, all string values are Unicode; for cx_Oracle, this corresponds to
``cx_Oracle.NCHAR`` being passed to ``setinputsizes()`` for that parameter.
In some edge cases, such as passing format specifiers for
the ``trunc()`` function, Oracle does not accept these as NCHAR::

    from sqlalchemy import func

    conn.execute(
        func.trunc(func.sysdate(), 'dd')
    )

In these cases, an error as follows may be raised::

    ORA-01899: bad precision specifier

When this error is encountered, it may be necessary to pass the string value
with an explicit non-unicode type::

    from sqlalchemy import func
    from sqlalchemy import literal
    from sqlalchemy import String

    conn.execute(
        func.trunc(func.sysdate(), literal('dd', String))
    )


.. _cx_oracle_returning:

RETURNING Support
-----------------

The cx_Oracle dialect implements RETURNING using OUT parameters.
The dialect supports RETURNING fully, however cx_Oracle 6 is recommended
for complete support.

.. _cx_oracle_lob:

LOB Objects
-----------

cx_oracle returns oracle LOBs using the cx_oracle.LOB object.  SQLAlchemy
converts these to strings so that the interface of the Binary type is
consistent with that of other backends, which takes place within a cx_Oracle
outputtypehandler.

cx_Oracle prior to version 6 would require that LOB objects be read before
a new batch of rows would be read, as determined by the ``cursor.arraysize``.
As of the 6 series, this limitation has been lifted.  Nevertheless, because
SQLAlchemy pre-reads these LOBs up front, this issue is avoided in any case.

To disable the auto "read()" feature of the dialect, the flag
``auto_convert_lobs=False`` may be passed to :func:`.create_engine`.  Under
the cx_Oracle 5 series, having this flag turned off means there is the chance
of reading from a stale LOB object if not read as it is fetched.  With
cx_Oracle 6, this issue is resolved.

.. versionchanged:: 1.2 the LOB handling system has been greatly simplified
   internally to make use of outputtypehandlers, and no longer makes use
   of alternate "buffered" result set objects.

Two Phase Transactions Not Supported
-------------------------------------

Two phase transactions are **not supported** under cx_Oracle due to poor
driver support.  As of cx_Oracle 6.0b1, the interface for
two phase transactions has been changed to be more of a direct pass-through
to the underlying OCI layer with less automation.  The additional logic
to support this system is not implemented in SQLAlchemy.

.. _cx_oracle_numeric:

Precision Numerics
------------------

SQLAlchemy's numeric types can handle receiving and returning values as Python
``Decimal`` objects or float objects.  When a :class:`.Numeric` object, or a
subclass such as :class:`.Float`, :class:`.oracle.DOUBLE_PRECISION` etc. is in
use, the :paramref:`.Numeric.asdecimal` flag determines if values should be
coerced to ``Decimal`` upon return, or returned as float objects.  To make
matters more complicated under Oracle, Oracle's ``NUMBER`` type can also
represent integer values if the "scale" is zero, so the Oracle-specific
:class:`.oracle.NUMBER` type takes this into account as well.

The cx_Oracle dialect makes extensive use of connection- and cursor-level
"outputtypehandler" callables in order to coerce numeric values as requested.
These callables are specific to the specific flavor of :class:`.Numeric` in
use, as well as if no SQLAlchemy typing objects are present.  There are
observed scenarios where Oracle may send incomplete or ambiguous information
about the numeric types being returned, such as a query where the numeric types
are buried under multiple levels of subquery.  The type handlers do their best
to make the right decision in all cases, deferring to the underlying cx_Oracle
DBAPI for all those cases where the driver can make the best decision.

When no typing objects are present, as when executing plain SQL strings, a
default "outputtypehandler" is present which will generally return numeric
values which specify precision and scale as Python ``Decimal`` objects.  To
disable this coercion to decimal for performance reasons, pass the flag
``coerce_to_decimal=False`` to :func:`.create_engine`::

    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False)

The ``coerce_to_decimal`` flag only impacts the results of plain string
SQL statements that are not otherwise associated with a :class:`.Numeric`
SQLAlchemy type (or a subclass of such).

.. versionchanged:: 1.2 The numeric handling system for cx_Oracle has been
   reworked to take advantage of newer cx_Oracle features as well
   as better integration of outputtypehandlers.

"""

from __future__ import absolute_import

from .base import OracleCompiler, OracleDialect, OracleExecutionContext
from . import base as oracle
from ...engine import result as _result
from sqlalchemy import types as sqltypes, util, exc, processors
import random
import collections
import decimal
import re
import time


class _OracleInteger(sqltypes.Integer):
    def _cx_oracle_var(self, dialect, cursor):
        cx_Oracle = dialect.dbapi
        return cursor.var(
            cx_Oracle.STRING,
            255,
            arraysize=cursor.arraysize,
            outconverter=int
        )

    def _cx_oracle_outputtypehandler(self, dialect):
        def handler(cursor, name,
                    default_type, size, precision, scale):
            return self._cx_oracle_var(dialect, cursor)
        return handler


class _OracleNumeric(sqltypes.Numeric):
    is_number = False

    def bind_processor(self, dialect):
        if self.scale == 0:
            return None
        elif self.asdecimal:
            processor = processors.to_decimal_processor_factory(
                decimal.Decimal, self._effective_decimal_return_scale)

            def process(value):
                if isinstance(value, (int, float)):
                    return processor(value)
                elif value is not None and value.is_infinite():
                    return float(value)
                else:
                    return value
            return process
        else:
            return processors.to_float

    def result_processor(self, dialect, coltype):
        return None

    def _cx_oracle_outputtypehandler(self, dialect):
        cx_Oracle = dialect.dbapi

        is_cx_oracle_6 = dialect._is_cx_oracle_6
        has_native_int = dialect._has_native_int

        def handler(cursor, name, default_type, size, precision, scale):
            outconverter = None
            if precision:
                if self.asdecimal:
                    if default_type == cx_Oracle.NATIVE_FLOAT:
                        # receiving float and doing Decimal after the fact
                        # allows for float("inf") to be handled
                        type_ = default_type
                        outconverter = decimal.Decimal
                    elif is_cx_oracle_6:
                        type_ = decimal.Decimal
                    else:
                        type_ = cx_Oracle.STRING
                        outconverter = dialect._to_decimal
                else:
                    if self.is_number and scale == 0:
                        if has_native_int:
                            type_ = cx_Oracle.NATIVE_INT
                        else:
                            type_ = cx_Oracle.NUMBER
                            outconverter = int
                    else:
                        type_ = cx_Oracle.NATIVE_FLOAT
            else:
                if self.asdecimal:
                    if default_type == cx_Oracle.NATIVE_FLOAT:
                        type_ = default_type
                        outconverter = decimal.Decimal
                    elif is_cx_oracle_6:
                        type_ = decimal.Decimal
                    else:
                        type_ = cx_Oracle.STRING
                        outconverter = dialect._to_decimal
                else:
                    if self.is_number and scale == 0:
                        if has_native_int:
                            type_ = cx_Oracle.NATIVE_INT
                        else:
                            type_ = cx_Oracle.NUMBER
                            outconverter = int
                    else:
                        type_ = cx_Oracle.NATIVE_FLOAT

            return cursor.var(
                type_, 255,
                arraysize=cursor.arraysize,
                outconverter=outconverter
            )

        return handler


class _OracleNUMBER(_OracleNumeric):
    is_number = True


class _OracleDate(sqltypes.Date):
    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        def process(value):
            if value is not None:
                return value.date()
            else:
                return value
        return process


class _OracleChar(sqltypes.CHAR):
    def get_dbapi_type(self, dbapi):
        return dbapi.FIXED_CHAR


class _OracleNVarChar(sqltypes.NVARCHAR):
    def get_dbapi_type(self, dbapi):
        return dbapi.NCHAR


class _OracleText(sqltypes.Text):
    def get_dbapi_type(self, dbapi):
        return dbapi.CLOB


class _OracleLong(oracle.LONG):
    def get_dbapi_type(self, dbapi):
        return dbapi.LONG_STRING


class _OracleString(sqltypes.String):
    pass


class _OracleEnum(sqltypes.Enum):
    def bind_processor(self, dialect):
        enum_proc = sqltypes.Enum.bind_processor(self, dialect)

        def process(value):
            raw_str = enum_proc(value)
            return raw_str
        return process


class _OracleUnicodeText(sqltypes.UnicodeText):
    def get_dbapi_type(self, dbapi):
        return dbapi.NCLOB


class _OracleBinary(sqltypes.LargeBinary):
    def get_dbapi_type(self, dbapi):
        return dbapi.BLOB

    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if not dialect.auto_convert_lobs:
            return None
        else:
            return super(_OracleBinary, self).result_processor(
                dialect, coltype)


class _OracleInterval(oracle.INTERVAL):
    def get_dbapi_type(self, dbapi):
        return dbapi.INTERVAL


class _OracleRaw(oracle.RAW):
    pass


class _OracleRowid(oracle.ROWID):
    def get_dbapi_type(self, dbapi):
        return dbapi.ROWID


class OracleCompiler_cx_oracle(OracleCompiler):
    _oracle_cx_sql_compiler = True

    def bindparam_string(self, name, **kw):
        quote = getattr(name, 'quote', None)
        if quote is True or quote is not False and \
                self.preparer._bindparam_requires_quotes(name):
            if kw.get('expanding', False):
                raise exc.CompileError(
                    "Can't use expanding feature with parameter name "
                    "%r on Oracle; it requires quoting which is not supported "
                    "in this context." % name)
            quoted_name = '"%s"' % name
            self._quoted_bind_names[name] = quoted_name
            return OracleCompiler.bindparam_string(self, quoted_name, **kw)
        else:
            return OracleCompiler.bindparam_string(self, name, **kw)


class OracleExecutionContext_cx_oracle(OracleExecutionContext):
    out_parameters = None

    def _setup_quoted_bind_names(self):
        quoted_bind_names = self.compiled._quoted_bind_names
        if quoted_bind_names:
            for param in self.parameters:
                for fromname, toname in quoted_bind_names.items():
                    param[toname] = param[fromname]
                    del param[fromname]

    def _handle_out_parameters(self):
        # if a single execute, check for outparams
        if len(self.compiled_parameters) == 1:
            quoted_bind_names = self.compiled._quoted_bind_names
            for bindparam in self.compiled.binds.values():
                if bindparam.isoutparam:
                    name = self.compiled.bind_names[bindparam]
                    type_impl = bindparam.type.dialect_impl(self.dialect)
                    if hasattr(type_impl, '_cx_oracle_var'):
                        self.out_parameters[name] = type_impl._cx_oracle_var(
                            self.dialect, self.cursor)
                    else:
                        dbtype = type_impl.get_dbapi_type(self.dialect.dbapi)
                        if dbtype is None:
                            raise exc.InvalidRequestError(
                                "Cannot create out parameter for parameter "
                                "%r - its type %r is not supported by"
                                " cx_oracle" %
                                (bindparam.key, bindparam.type)
                            )
                        self.out_parameters[name] = self.cursor.var(dbtype)
                    self.parameters[0][quoted_bind_names.get(name, name)] = \
                        self.out_parameters[name]

    def _generate_cursor_outputtype_handler(self):
        output_handlers = {}

        for (keyname, name, objects, type_) in self.compiled._result_columns:
            handler = type_._cached_custom_processor(
                self.dialect,
                'cx_oracle_outputtypehandler',
                self._get_cx_oracle_type_handler)

            if handler:
                denormalized_name = self.dialect.denormalize_name(keyname)
                output_handlers[denormalized_name] = handler

        if output_handlers:
            default_handler = self._dbapi_connection.outputtypehandler

            def output_type_handler(cursor, name, default_type,
                                    size, precision, scale):
                if name in output_handlers:
                    return output_handlers[name](
                        cursor, name,
                        default_type, size, precision, scale)
                else:
                    return default_handler(
                        cursor, name, default_type, size, precision, scale
                    )
            self.cursor.outputtypehandler = output_type_handler

    def _get_cx_oracle_type_handler(self, impl):
        if hasattr(impl, "_cx_oracle_outputtypehandler"):
            return impl._cx_oracle_outputtypehandler(self.dialect)
        else:
            return None

    def pre_exec(self):
        if not getattr(self.compiled, "_oracle_cx_sql_compiler", False):
            return

        self.out_parameters = {}

        if self.compiled._quoted_bind_names:
            self._setup_quoted_bind_names()

        self.set_input_sizes(
            self.compiled._quoted_bind_names,
            include_types=self.dialect._include_setinputsizes
        )

        self._handle_out_parameters()

        self._generate_cursor_outputtype_handler()

    def create_cursor(self):
        c = self._dbapi_connection.cursor()
        if self.dialect.arraysize:
            c.arraysize = self.dialect.arraysize

        return c

    def get_result_proxy(self):
        if self.out_parameters and self.compiled.returning:
            returning_params = [
                self.out_parameters["ret_%d" % i].getvalue()
                for i in range(len(self.out_parameters))
            ]
            return ReturningResultProxy(self, returning_params)

        result = _result.ResultProxy(self)

        if self.out_parameters:
            if self.compiled_parameters is not None and \
                    len(self.compiled_parameters) == 1:
                result.out_parameters = out_parameters = {}

                for bind, name in self.compiled.bind_names.items():
                    if name in self.out_parameters:
                        type = bind.type
                        impl_type = type.dialect_impl(self.dialect)
                        dbapi_type = impl_type.get_dbapi_type(
                            self.dialect.dbapi)
                        result_processor = impl_type.\
                            result_processor(self.dialect,
                                             dbapi_type)
                        if result_processor is not None:
                            out_parameters[name] = \
                                result_processor(
                                    self.out_parameters[name].getvalue())
                        else:
                            out_parameters[name] = self.out_parameters[
                                name].getvalue()
            else:
                result.out_parameters = dict(
                    (k, v.getvalue())
                    for k, v in self.out_parameters.items()
                )

        return result


class ReturningResultProxy(_result.FullyBufferedResultProxy):
    """Result proxy which stuffs the _returning clause + outparams
    into the fetch."""

    def __init__(self, context, returning_params):
        self._returning_params = returning_params
        super(ReturningResultProxy, self).__init__(context)

    def _cursor_description(self):
        returning = self.context.compiled.returning
        return [
            (getattr(col, 'name', col.anon_label), None)
            for col in returning
        ]

    def _buffer_rows(self):
        return collections.deque(
            [tuple(self._returning_params)]
        )


class OracleDialect_cx_oracle(OracleDialect):
    execution_ctx_cls = OracleExecutionContext_cx_oracle
    statement_compiler = OracleCompiler_cx_oracle

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_unicode_statements = True
    supports_unicode_binds = True

    driver = "cx_oracle"

    colspecs = {
        sqltypes.Numeric: _OracleNumeric,
        sqltypes.Float: _OracleNumeric,
        sqltypes.Integer: _OracleInteger,
        oracle.NUMBER: _OracleNUMBER,

        sqltypes.Date: _OracleDate,
        sqltypes.LargeBinary: _OracleBinary,
        sqltypes.Boolean: oracle._OracleBoolean,
        sqltypes.Interval: _OracleInterval,
        oracle.INTERVAL: _OracleInterval,
        sqltypes.Text: _OracleText,
        sqltypes.String: _OracleString,
        sqltypes.UnicodeText: _OracleUnicodeText,
        sqltypes.CHAR: _OracleChar,
        sqltypes.Enum: _OracleEnum,

        oracle.LONG: _OracleLong,
        oracle.RAW: _OracleRaw,
        sqltypes.Unicode: _OracleNVarChar,
        sqltypes.NVARCHAR: _OracleNVarChar,
        oracle.ROWID: _OracleRowid,
    }

    execute_sequence_format = list

    def __init__(self,
                 auto_convert_lobs=True,
                 threaded=True,
                 coerce_to_unicode=False,
                 coerce_to_decimal=True,
                 arraysize=50,
                 **kwargs):

        self._pop_deprecated_kwargs(kwargs)

        OracleDialect.__init__(self, **kwargs)
        self.threaded = threaded
        self.arraysize = arraysize
        self.auto_convert_lobs = auto_convert_lobs
        self.coerce_to_unicode = coerce_to_unicode
        self.coerce_to_decimal = coerce_to_decimal

        cx_Oracle = self.dbapi

        if cx_Oracle is None:
            self._include_setinputsizes = {}
            self.cx_oracle_ver = (0, 0, 0)
        else:
            self.cx_oracle_ver = self._parse_cx_oracle_ver(cx_Oracle.version)
            if self.cx_oracle_ver < (5, 2) and self.cx_oracle_ver > (0, 0, 0):
                raise exc.InvalidRequestError(
                    "cx_Oracle version 5.2 and above are supported")

            self._has_native_int = hasattr(cx_Oracle, "NATIVE_INT")

            self._include_setinputsizes = {
                cx_Oracle.NCLOB, cx_Oracle.CLOB, cx_Oracle.LOB,
                cx_Oracle.NCHAR, cx_Oracle.FIXED_NCHAR,
                cx_Oracle.BLOB, cx_Oracle.FIXED_CHAR, cx_Oracle.TIMESTAMP
            }

            self._is_cx_oracle_6 = self.cx_oracle_ver >= (6, )

    def _pop_deprecated_kwargs(self, kwargs):
        auto_setinputsizes = kwargs.pop('auto_setinputsizes', None)
        exclude_setinputsizes = kwargs.pop('exclude_setinputsizes', None)
        if auto_setinputsizes or exclude_setinputsizes:
            util.warn_deprecated(
                "auto_setinputsizes and exclude_setinputsizes are deprecated. "
                "Modern cx_Oracle only requires that LOB types are part "
                "of this behavior, and these parameters no longer have any "
                "effect.")
        allow_twophase = kwargs.pop('allow_twophase', None)
        if allow_twophase is not None:
            util.warn_deprecated(
                "allow_twophase is deprecated. The cx_Oracle dialect no "
                "longer supports two-phase transaction mode."
            )

    def _parse_cx_oracle_ver(self, version):
        m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', version)
        if m:
            return tuple(
                int(x)
                for x in m.group(1, 2, 3)
                if x is not None)
        else:
            return (0, 0, 0)

    @classmethod
    def dbapi(cls):
        import cx_Oracle
        return cx_Oracle

    def initialize(self, connection):
        super(OracleDialect_cx_oracle, self).initialize(connection)
        if self._is_oracle_8:
            self.supports_unicode_binds = False

        self._detect_decimal_char(connection)

    def _detect_decimal_char(self, connection):
        # we have the option to change this setting upon connect,
        # or just look at what it is upon connect and convert.
        # to minimize the chance of interference with changes to
        # NLS_TERRITORY or formatting behavior of the DB, we opt
        # to just look at it

        self._decimal_char = connection.scalar(
            "select value from nls_session_parameters "
            "where parameter = 'NLS_NUMERIC_CHARACTERS'")[0]
        if self._decimal_char != '.':
            _detect_decimal = self._detect_decimal
            _to_decimal = self._to_decimal

            self._detect_decimal = lambda value: _detect_decimal(
                value.replace(self._decimal_char, "."))
            self._to_decimal = lambda value: _to_decimal(
                value.replace(self._decimal_char, "."))

    def _detect_decimal(self, value):
        if "." in value:
            return self._to_decimal(value)
        else:
            return int(value)

    _to_decimal = decimal.Decimal

    def _generate_connection_outputtype_handler(self):
        """establish the default outputtypehandler established at the
        connection level.

        """

        dialect = self
        cx_Oracle = dialect.dbapi

        number_handler = _OracleNUMBER(asdecimal=True).\
            _cx_oracle_outputtypehandler(dialect)
        float_handler = _OracleNUMBER(asdecimal=False).\
            _cx_oracle_outputtypehandler(dialect)

        def output_type_handler(cursor, name, default_type,
                                size, precision, scale):
            if default_type == cx_Oracle.NUMBER:
                if not dialect.coerce_to_decimal:
                    return None
                elif precision == 0 and scale in (0, -127):
                    # ambiguous type, this occurs when selecting
                    # numbers from deep subqueries
                    return cursor.var(
                        cx_Oracle.STRING,
                        255,
                        outconverter=dialect._detect_decimal,
                        arraysize=cursor.arraysize)
                elif precision and scale > 0:
                    return number_handler(
                        cursor, name, default_type, size, precision, scale
                    )
                else:
                    return float_handler(
                        cursor, name, default_type, size, precision, scale
                    )

            # allow all strings to come back natively as Unicode
            elif dialect.coerce_to_unicode and \
                    default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
                return cursor.var(
                    util.text_type, size, cursor.arraysize
                )
            elif dialect.auto_convert_lobs and default_type in (
                    cx_Oracle.CLOB, cx_Oracle.NCLOB, cx_Oracle.BLOB
            ):
                return cursor.var(
                    default_type, size, cursor.arraysize,
                    outconverter=lambda value: value.read()
                )
        return output_type_handler

    def on_connect(self):

        output_type_handler = self._generate_connection_outputtype_handler()

        def on_connect(conn):
            conn.outputtypehandler = output_type_handler

        return on_connect

    def create_connect_args(self, url):
        dialect_opts = dict(url.query)

        for opt in ('use_ansi', 'auto_setinputsizes', 'auto_convert_lobs',
                    'threaded', 'allow_twophase'):
            if opt in dialect_opts:
                util.coerce_kw_type(dialect_opts, opt, bool)
                setattr(self, opt, dialect_opts[opt])

        database = url.database
        service_name = dialect_opts.get('service_name', None)
        if database or service_name:
            # if we have a database, then we have a remote host
            port = url.port
            if port:
                port = int(port)
            else:
                port = 1521

            if database and service_name:
                raise exc.InvalidRequestError(
                    '"service_name" option shouldn\'t '
                    'be used with a "database" part of the url')
            if database:
                makedsn_kwargs = {'sid': database}
            if service_name:
                makedsn_kwargs = {'service_name': service_name}

            dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs)
        else:
            # we have a local tnsname
            dsn = url.host

        opts = dict(
            threaded=self.threaded,
        )

        if dsn is not None:
            opts['dsn'] = dsn
        if url.password is not None:
            opts['password'] = url.password
        if url.username is not None:
            opts['user'] = url.username

        if 'mode' in url.query:
            opts['mode'] = url.query['mode']
            if isinstance(opts['mode'], util.string_types):
                mode = opts['mode'].upper()
                if mode == 'SYSDBA':
                    opts['mode'] = self.dbapi.SYSDBA
                elif mode == 'SYSOPER':
                    opts['mode'] = self.dbapi.SYSOPER
                else:
                    util.coerce_kw_type(opts, 'mode', int)
        return ([], opts)

    def _get_server_version_info(self, connection):
        return tuple(
            int(x)
            for x in connection.connection.version.split('.')
        )

    def is_disconnect(self, e, connection, cursor):
        error, = e.args
        if isinstance(
            e,
            (self.dbapi.InterfaceError, self.dbapi.DatabaseError)
        ) and "not connected" in str(e):
            return True

        if hasattr(error, 'code'):
            # ORA-00028: your session has been killed
            # ORA-03114: not connected to ORACLE
            # ORA-03113: end-of-file on communication channel
            # ORA-03135: connection lost contact
            # ORA-01033: ORACLE initialization or shutdown in progress
            # ORA-02396: exceeded maximum idle time, please connect again
            # TODO: Others ?
            return error.code in (28, 3114, 3113, 3135, 1033, 2396)
        else:
            return False

    def create_xid(self):
        """create a two-phase transaction ID.

        this id will be passed to do_begin_twophase(), do_rollback_twophase(),
        do_commit_twophase().  its format is unspecified.

        .. deprecated:: two-phase transaction support is no longer functional
           in SQLAlchemy's cx_Oracle dialect as of cx_Oracle 6.0b1

        """

        id = random.randint(0, 2 ** 128)
        return (0x1234, "%032x" % id, "%032x" % 9)

    def do_executemany(self, cursor, statement, parameters, context=None):
        if isinstance(parameters, tuple):
            parameters = list(parameters)
        cursor.executemany(statement, parameters)

    def do_begin_twophase(self, connection, xid):
        connection.connection.begin(*xid)

    def do_prepare_twophase(self, connection, xid):
        result = connection.connection.prepare()
        connection.info['cx_oracle_prepared'] = result

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        self.do_rollback(connection.connection)

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_commit(connection.connection)
        else:
            oci_prepared = connection.info['cx_oracle_prepared']
            if oci_prepared:
                self.do_commit(connection.connection)

    def do_recover_twophase(self, connection):
        connection.info.pop('cx_oracle_prepared', None)

dialect = OracleDialect_cx_oracle
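
# --- Editor's note: a hypothetical connection sketch, not part of this diff,
# --- combining the cx_Oracle flags documented in the module docstring above;
# --- the host, credentials, and service name are placeholders.
#
# from sqlalchemy import create_engine
#
# engine = create_engine(
#     "oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr",
#     arraysize=100,            # cursor.arraysize; the dialect defaults to 50
#     coerce_to_decimal=False,  # skip Decimal coercion for plain string SQL
# )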

@@ -0,0 +1,235 @@
# oracle/zxjdbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: oracle+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: oracle+zxjdbc://user:pass@host/dbname
    :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

"""
import decimal
import re

from sqlalchemy import sql, types as sqltypes, util
from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
from sqlalchemy.dialects.oracle.base import (OracleCompiler,
                                             OracleDialect,
                                             OracleExecutionContext)
from sqlalchemy.engine import result as _result
from sqlalchemy.sql import expression
import collections

SQLException = zxJDBC = None


class _ZxJDBCDate(sqltypes.Date):

    def result_processor(self, dialect, coltype):
        def process(value):
            if value is None:
                return None
            else:
                return value.date()
        return process


class _ZxJDBCNumeric(sqltypes.Numeric):

    def result_processor(self, dialect, coltype):
        # XXX: does the dialect return Decimal or not???
        # if it does (in all cases), we could use a None processor as well as
        # the to_float generic processor
        if self.asdecimal:
            def process(value):
                if isinstance(value, decimal.Decimal):
                    return value
                else:
                    return decimal.Decimal(str(value))
        else:
            def process(value):
                if isinstance(value, decimal.Decimal):
                    return float(value)
                else:
                    return value
        return process


class OracleCompiler_zxjdbc(OracleCompiler):

    def returning_clause(self, stmt, returning_cols):
        self.returning_cols = list(
            expression._select_iterables(returning_cols))

        # within_columns_clause=False so that labels (foo AS bar) don't render
        columns = [self.process(c, within_columns_clause=False)
                   for c in self.returning_cols]

        if not hasattr(self, 'returning_parameters'):
            self.returning_parameters = []

        binds = []
        for i, col in enumerate(self.returning_cols):
            dbtype = col.type.dialect_impl(
                self.dialect).get_dbapi_type(self.dialect.dbapi)
            self.returning_parameters.append((i + 1, dbtype))

            bindparam = sql.bindparam(
                "ret_%d" % i, value=ReturningParam(dbtype))
            self.binds[bindparam.key] = bindparam
            binds.append(
                self.bindparam_string(self._truncate_bindparam(bindparam)))

        return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)


class OracleExecutionContext_zxjdbc(OracleExecutionContext):

    def pre_exec(self):
        if hasattr(self.compiled, 'returning_parameters'):
            # prepare a zxJDBC statement so we can grab its underlying
            # OraclePreparedStatement's getReturnResultSet later
            self.statement = self.cursor.prepare(self.statement)

    def get_result_proxy(self):
        if hasattr(self.compiled, 'returning_parameters'):
            rrs = None
            try:
                try:
                    rrs = self.statement.__statement__.getReturnResultSet()
                    next(rrs)
                except SQLException as sqle:
                    msg = '%s [SQLCode: %d]' % (
                        sqle.getMessage(), sqle.getErrorCode())
                    if sqle.getSQLState() is not None:
                        msg += ' [SQLState: %s]' % sqle.getSQLState()
                    raise zxJDBC.Error(msg)
                else:
                    row = tuple(
                        self.cursor.datahandler.getPyObject(
                            rrs, index, dbtype)
                        for index, dbtype in
                        self.compiled.returning_parameters)
                    return ReturningResultProxy(self, row)
            finally:
                if rrs is not None:
                    try:
                        rrs.close()
                    except SQLException:
                        pass
                self.statement.close()

        return _result.ResultProxy(self)

    def create_cursor(self):
        cursor = self._dbapi_connection.cursor()
        cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
        return cursor


class ReturningResultProxy(_result.FullyBufferedResultProxy):

    """ResultProxy backed by the RETURNING ResultSet results."""

    def __init__(self, context, returning_row):
        self._returning_row = returning_row
        super(ReturningResultProxy, self).__init__(context)

    def _cursor_description(self):
        ret = []
        for c in self.context.compiled.returning_cols:
            if hasattr(c, 'name'):
                ret.append((c.name, c.type))
            else:
                ret.append((c.anon_label, c.type))
        return ret

    def _buffer_rows(self):
        return collections.deque([self._returning_row])


class ReturningParam(object):

    """A bindparam value representing a RETURNING parameter.

    Specially handled by OracleReturningDataHandler.
    """

    def __init__(self, type):
        self.type = type

    def __eq__(self, other):
        if isinstance(other, ReturningParam):
            return self.type == other.type
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, ReturningParam):
            return self.type != other.type
        return NotImplemented

    def __repr__(self):
        kls = self.__class__
        return '<%s.%s object at 0x%x type=%s>' % (
            kls.__module__, kls.__name__, id(self), self.type)


class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
    jdbc_db_name = 'oracle'
    jdbc_driver_name = 'oracle.jdbc.OracleDriver'

    statement_compiler = OracleCompiler_zxjdbc
    execution_ctx_cls = OracleExecutionContext_zxjdbc

    colspecs = util.update_copy(
        OracleDialect.colspecs,
        {
            sqltypes.Date: _ZxJDBCDate,
            sqltypes.Numeric: _ZxJDBCNumeric
        }
    )

    def __init__(self, *args, **kwargs):
        super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs)
        global SQLException, zxJDBC
        from java.sql import SQLException
        from com.ziclix.python.sql import zxJDBC
        from com.ziclix.python.sql.handler import OracleDataHandler

        class OracleReturningDataHandler(OracleDataHandler):
            """zxJDBC DataHandler that specially handles ReturningParam."""

            def setJDBCObject(self, statement, index, object, dbtype=None):
                if type(object) is ReturningParam:
                    statement.registerReturnParameter(index, object.type)
                elif dbtype is None:
                    OracleDataHandler.setJDBCObject(
                        self, statement, index, object)
                else:
                    OracleDataHandler.setJDBCObject(
                        self, statement, index, object, dbtype)
        self.DataHandler = OracleReturningDataHandler

    def initialize(self, connection):
        super(OracleDialect_zxjdbc, self).initialize(connection)
        self.implicit_returning = \
            connection.connection.driverversion >= '10.2'

    def _create_jdbc_url(self, url):
        return 'jdbc:oracle:thin:@%s:%s:%s' % (
            url.host, url.port or 1521, url.database)

    def _get_server_version_info(self, connection):
        version = re.search(
            r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
        return tuple(int(x) for x in version.split('.'))

dialect = OracleDialect_zxjdbc

@@ -0,0 +1,37 @@
# postgresql/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, psycopg2, pg8000, pypostgresql, pygresql, \
    zxjdbc, psycopg2cffi  # noqa

from .base import \
    INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
    INET, CIDR, UUID, BIT, MACADDR, MONEY, OID, REGCLASS, DOUBLE_PRECISION, \
    TIMESTAMP, TIME, DATE, BYTEA, BOOLEAN, INTERVAL, ENUM, TSVECTOR, \
    DropEnumType, CreateEnumType
from .hstore import HSTORE, hstore
from .json import JSON, JSONB, json
from .array import array, ARRAY, Any, All
from .ext import aggregate_order_by, ExcludeConstraint, array_agg
from .dml import insert, Insert

from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
    TSTZRANGE

base.dialect = dialect = psycopg2.dialect


__all__ = (
    'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC',
    'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'MONEY', 'OID',
    'REGCLASS', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA',
    'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'array', 'HSTORE',
    'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'Any', 'All',
    'DropEnumType', 'CreateEnumType', 'ExcludeConstraint',
    'aggregate_order_by', 'array_agg', 'insert', 'Insert'
)
@ -0,0 +1,303 @@
|
|||
# postgresql/array.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .base import ischema_names, colspecs
from ...sql import expression, operators
from ...sql.base import SchemaEventTarget
from ... import types as sqltypes

try:
    from uuid import UUID as _python_UUID
except ImportError:
    _python_UUID = None


def Any(other, arrexpr, operator=operators.eq):
    """A synonym for the :meth:`.ARRAY.Comparator.any` method.

    This method is legacy and is here for backwards-compatibility.

    .. seealso::

        :func:`.expression.any_`

    """
    return arrexpr.any(other, operator)


def All(other, arrexpr, operator=operators.eq):
    """A synonym for the :meth:`.ARRAY.Comparator.all` method.

    This method is legacy and is here for backwards-compatibility.

    .. seealso::

        :func:`.expression.all_`

    """
    return arrexpr.all(other, operator)


class array(expression.Tuple):

    """A PostgreSQL ARRAY literal.

    This is used to produce ARRAY literals in SQL expressions, e.g.::

        from sqlalchemy.dialects.postgresql import array
        from sqlalchemy.dialects import postgresql
        from sqlalchemy import select, func

        stmt = select([
            array([1, 2]) + array([3, 4, 5])
        ])

        print stmt.compile(dialect=postgresql.dialect())

    Produces the SQL::

        SELECT ARRAY[%(param_1)s, %(param_2)s] ||
            ARRAY[%(param_3)s, %(param_4)s, %(param_5)s] AS anon_1

    An instance of :class:`.array` will always have the datatype
    :class:`.ARRAY`.  The "inner" type of the array is inferred from
    the values present, unless the ``type_`` keyword argument is passed::

        array(['foo', 'bar'], type_=CHAR)

    .. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.

    See also:

    :class:`.postgresql.ARRAY`

    """
    __visit_name__ = 'array'

    def __init__(self, clauses, **kw):
        super(array, self).__init__(*clauses, **kw)
        self.type = ARRAY(self.type)

    def _bind_param(self, operator, obj, _assume_scalar=False, type_=None):
        if _assume_scalar or operator is operators.getitem:
            # if getitem->slice were called, Indexable produces
            # a Slice object from that
            assert isinstance(obj, int)
            return expression.BindParameter(
                None, obj, _compared_to_operator=operator,
                type_=type_,
                _compared_to_type=self.type, unique=True)

        else:
            return array([
                self._bind_param(operator, o, _assume_scalar=True, type_=type_)
                for o in obj])

    def self_group(self, against=None):
        if (against in (
                operators.any_op, operators.all_op, operators.getitem)):
            return expression.Grouping(self)
        else:
            return self


CONTAINS = operators.custom_op("@>", precedence=5)

CONTAINED_BY = operators.custom_op("<@", precedence=5)

OVERLAP = operators.custom_op("&&", precedence=5)


class ARRAY(sqltypes.ARRAY):

    """PostgreSQL ARRAY type.

    .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
       a subclass of the core :class:`.types.ARRAY` type.

    The :class:`.postgresql.ARRAY` type is constructed in the same way
    as the core :class:`.types.ARRAY` type; a member type is required, and a
    number of dimensions is recommended if the type is to be used for more
    than one dimension::

        from sqlalchemy.dialects import postgresql

        mytable = Table("mytable", metadata,
                Column("data", postgresql.ARRAY(Integer, dimensions=2))
            )

    The :class:`.postgresql.ARRAY` type provides all operations defined on
    the core :class:`.types.ARRAY` type, including support for "dimensions",
    indexed access, and simple matching such as
    :meth:`.types.ARRAY.Comparator.any` and
    :meth:`.types.ARRAY.Comparator.all`.  The :class:`.postgresql.ARRAY`
    class also provides PostgreSQL-specific methods for containment
    operations, including :meth:`.postgresql.ARRAY.Comparator.contains`,
    :meth:`.postgresql.ARRAY.Comparator.contained_by`,
    and :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.::

        mytable.c.data.contains([1, 2])

    The :class:`.postgresql.ARRAY` type may not be supported on all
    PostgreSQL DBAPIs; it is currently known to work on psycopg2 only.

    Additionally, the :class:`.postgresql.ARRAY` type does not work directly
    in conjunction with the :class:`.ENUM` type.  For a workaround, see the
    special type at :ref:`postgresql_array_of_enum`.

    .. seealso::

        :class:`.types.ARRAY` - base array type

        :class:`.postgresql.array` - produces a literal array value.

    """

    class Comparator(sqltypes.ARRAY.Comparator):

        """Define comparison operations for :class:`.ARRAY`.

        Note that these operations are in addition to those provided
        by the base :class:`.types.ARRAY.Comparator` class, including
        :meth:`.types.ARRAY.Comparator.any` and
        :meth:`.types.ARRAY.Comparator.all`.

        """

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if elements are a superset of the
            elements of the argument array expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if elements are a proper subset of
            the elements of the argument array expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

        def overlap(self, other):
            """Boolean expression.  Test if array has elements in common with
            an argument array expression.
            """
            return self.operate(OVERLAP, other, result_type=sqltypes.Boolean)

    comparator_factory = Comparator

    def __init__(self, item_type, as_tuple=False, dimensions=None,
                 zero_indexes=False):
        """Construct an ARRAY.

        E.g.::

          Column('myarray', ARRAY(Integer))

        Arguments are:

        :param item_type: The data type of items of this array. Note that
          dimensionality is irrelevant here, so multi-dimensional arrays like
          ``INTEGER[][]`` are constructed as ``ARRAY(Integer)``, not as
          ``ARRAY(ARRAY(Integer))`` or such.

        :param as_tuple=False: Specify whether return results
          should be converted to tuples from lists. DBAPIs such
          as psycopg2 return lists by default. When tuples are
          returned, the results are hashable.

        :param dimensions: if non-None, the ARRAY will assume a fixed
         number of dimensions.  This will cause the DDL emitted for this
         ARRAY to include the exact number of bracket clauses ``[]``,
         and will also optimize the performance of the type overall.
         Note that PG arrays are always implicitly "non-dimensioned",
         meaning they can store any number of dimensions no matter how
         they were declared.

        :param zero_indexes=False: when True, index values will be converted
         between Python zero-based and PostgreSQL one-based indexes, e.g.
         a value of one will be added to all index values before passing
         to the database.

         .. versionadded:: 0.9.5

        """
        if isinstance(item_type, ARRAY):
            raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
                             "handles multi-dimensional arrays of basetype")
        if isinstance(item_type, type):
            item_type = item_type()
        self.item_type = item_type
        self.as_tuple = as_tuple
        self.dimensions = dimensions
        self.zero_indexes = zero_indexes

    @property
    def hashable(self):
        return self.as_tuple

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

    def _proc_array(self, arr, itemproc, dim, collection):
        if dim is None:
            arr = list(arr)
        if dim == 1 or dim is None and (
                # this has to be (list, tuple), or at least
                # not hasattr('__iter__'), since Py3K strings
                # etc. have __iter__
                not arr or not isinstance(arr[0], (list, tuple))):
            if itemproc:
                return collection(itemproc(x) for x in arr)
            else:
                return collection(arr)
        else:
            return collection(
                self._proc_array(
                    x, itemproc,
                    dim - 1 if dim is not None else None,
                    collection)
                for x in arr
            )

    def bind_processor(self, dialect):
        item_proc = self.item_type.dialect_impl(dialect).\
            bind_processor(dialect)

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value,
                    item_proc,
                    self.dimensions,
                    list)
        return process

    def result_processor(self, dialect, coltype):
        item_proc = self.item_type.dialect_impl(dialect).\
            result_processor(dialect, coltype)

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value,
                    item_proc,
                    self.dimensions,
                    tuple if self.as_tuple else list)
        return process

colspecs[sqltypes.ARRAY] = ARRAY
ischema_names['_array'] = ARRAY
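Editor's note: a minimal usage sketch for the containment operators defined above (my addition, not part of the diff; the table and column names are hypothetical). It only compiles a statement, so no database connection is needed:

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    mytable = Table('mytable', metadata,
                    Column('data', postgresql.ARRAY(Integer)))

    # .contains() renders the @> operator defined above
    stmt = select([mytable.c.data]).where(mytable.c.data.contains([1, 2]))
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT mytable.data FROM mytable WHERE mytable.data @> %(data_1)s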
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,214 @@
|
|||
# postgresql/on_conflict.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from ...sql.elements import ClauseElement, _literal_as_binds
from ...sql.dml import Insert as StandardInsert
from ...sql.expression import alias
from ...sql import schema
from ...util.langhelpers import public_factory
from ...sql.base import _generative
from ... import util
from . import ext

__all__ = ('Insert', 'insert')


class Insert(StandardInsert):
    """PostgreSQL-specific implementation of INSERT.

    Adds methods for PG-specific syntaxes such as ON CONFLICT.

    .. versionadded:: 1.1

    """

    @util.memoized_property
    def excluded(self):
        """Provide the ``excluded`` namespace for an ON CONFLICT statement.

        PG's ON CONFLICT clause allows reference to the row that would
        be inserted, known as ``excluded``.  This attribute provides
        all columns in this row to be referenceable.

        .. seealso::

            :ref:`postgresql_insert_on_conflict` - example of how
            to use :attr:`.Insert.excluded`

        """
        return alias(self.table, name='excluded').columns

    @_generative
    def on_conflict_do_update(
            self,
            constraint=None, index_elements=None,
            index_where=None, set_=None, where=None):
        """
        Specifies a DO UPDATE SET action for ON CONFLICT clause.

        Either the ``constraint`` or ``index_elements`` argument is
        required, but only one of these can be specified.

        :param constraint:
         The name of a unique or exclusion constraint on the table,
         or the constraint object itself if it has a .name attribute.

        :param index_elements:
         A sequence consisting of string column names, :class:`.Column`
         objects, or other column expression objects that will be used
         to infer a target index.

        :param index_where:
         Additional WHERE criterion that can be used to infer a
         conditional target index.

        :param set_:
         Required argument. A dictionary or other mapping object
         with column names as keys and expressions or literals as values,
         specifying the ``SET`` actions to take.
         If the target :class:`.Column` specifies a ".key" attribute distinct
         from the column name, that key should be used.

         .. warning:: This dictionary does **not** take into account
            Python-specified default UPDATE values or generation functions,
            e.g. those specified using :paramref:`.Column.onupdate`.
            These values will not be exercised for an ON CONFLICT style of
            UPDATE, unless they are manually specified in the
            :paramref:`.Insert.on_conflict_do_update.set_` dictionary.

        :param where:
         Optional argument. If present, can be a literal SQL
         string or an acceptable expression for a ``WHERE`` clause
         that restricts the rows affected by ``DO UPDATE SET``. Rows
         not meeting the ``WHERE`` condition will not be updated
         (effectively a ``DO NOTHING`` for those rows).

         .. versionadded:: 1.1

        .. seealso::

            :ref:`postgresql_insert_on_conflict`

        """
        self._post_values_clause = OnConflictDoUpdate(
            constraint, index_elements, index_where, set_, where)
        return self

    @_generative
    def on_conflict_do_nothing(
            self,
            constraint=None, index_elements=None, index_where=None):
        """
        Specifies a DO NOTHING action for ON CONFLICT clause.

        The ``constraint`` and ``index_elements`` arguments
        are optional, but only one of these can be specified.

        :param constraint:
         The name of a unique or exclusion constraint on the table,
         or the constraint object itself if it has a .name attribute.

        :param index_elements:
         A sequence consisting of string column names, :class:`.Column`
         objects, or other column expression objects that will be used
         to infer a target index.

        :param index_where:
         Additional WHERE criterion that can be used to infer a
         conditional target index.

        .. versionadded:: 1.1

        .. seealso::

            :ref:`postgresql_insert_on_conflict`

        """
        self._post_values_clause = OnConflictDoNothing(
            constraint, index_elements, index_where)
        return self

insert = public_factory(Insert, '.dialects.postgresql.insert')


class OnConflictClause(ClauseElement):
    def __init__(
            self,
            constraint=None,
            index_elements=None,
            index_where=None):

        if constraint is not None:
            if not isinstance(constraint, util.string_types) and \
                    isinstance(constraint, (
                        schema.Index, schema.Constraint,
                        ext.ExcludeConstraint)):
                constraint = getattr(constraint, 'name') or constraint

        if constraint is not None:
            if index_elements is not None:
                raise ValueError(
                    "'constraint' and 'index_elements' are mutually exclusive")

            if isinstance(constraint, util.string_types):
                self.constraint_target = constraint
                self.inferred_target_elements = None
                self.inferred_target_whereclause = None
            elif isinstance(constraint, schema.Index):
                index_elements = constraint.expressions
                index_where = \
                    constraint.dialect_options['postgresql'].get("where")
            elif isinstance(constraint, ext.ExcludeConstraint):
                index_elements = constraint.columns
                index_where = constraint.where
            else:
                index_elements = constraint.columns
                index_where = \
                    constraint.dialect_options['postgresql'].get("where")

        if index_elements is not None:
            self.constraint_target = None
            self.inferred_target_elements = index_elements
            self.inferred_target_whereclause = index_where
        elif constraint is None:
            self.constraint_target = self.inferred_target_elements = \
                self.inferred_target_whereclause = None


class OnConflictDoNothing(OnConflictClause):
    __visit_name__ = 'on_conflict_do_nothing'


class OnConflictDoUpdate(OnConflictClause):
    __visit_name__ = 'on_conflict_do_update'

    def __init__(
            self,
            constraint=None,
            index_elements=None,
            index_where=None,
            set_=None,
            where=None):
        super(OnConflictDoUpdate, self).__init__(
            constraint=constraint,
            index_elements=index_elements,
            index_where=index_where)

        if self.inferred_target_elements is None and \
                self.constraint_target is None:
            raise ValueError(
                "Either constraint or index_elements, "
                "but not both, must be specified unless DO NOTHING")

        if (not isinstance(set_, dict) or not set_):
            raise ValueError("set parameter must be a non-empty dictionary")
        self.update_values_to_set = [
            (key, value)
            for key, value in set_.items()
        ]
        self.update_whereclause = where
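Editor's note: a usage sketch for the generative methods above (my addition, not part of the diff; the users table is hypothetical). The ``excluded`` namespace refers to the row proposed for insertion:

    from sqlalchemy import Table, Column, Integer, String, MetaData
    from sqlalchemy.dialects.postgresql import insert

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String))

    stmt = insert(users).values(id=1, name='alice')
    # upsert: on an id collision, overwrite name with the incoming value
    stmt = stmt.on_conflict_do_update(
        index_elements=['id'],
        set_=dict(name=stmt.excluded.name))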

@ -0,0 +1,218 @@

# postgresql/ext.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from ...sql import expression
from ...sql import elements
from ...sql import functions
from ...sql.schema import ColumnCollectionConstraint
from .array import ARRAY


class aggregate_order_by(expression.ColumnElement):
    """Represent a PostgreSQL aggregate order by expression.

    E.g.::

        from sqlalchemy.dialects.postgresql import aggregate_order_by
        expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
        stmt = select([expr])

    would represent the expression::

        SELECT array_agg(a ORDER BY b DESC) FROM table;

    Similarly::

        expr = func.string_agg(
            table.c.a,
            aggregate_order_by(literal_column("','"), table.c.a)
        )
        stmt = select([expr])

    Would represent::

        SELECT string_agg(a, ',' ORDER BY a) FROM table;

    .. versionadded:: 1.1

    .. seealso::

        :class:`.array_agg`

    """

    __visit_name__ = 'aggregate_order_by'

    def __init__(self, target, order_by):
        self.target = elements._literal_as_binds(target)
        self.order_by = elements._literal_as_binds(order_by)

    def self_group(self, against=None):
        return self

    def get_children(self, **kwargs):
        return self.target, self.order_by

    def _copy_internals(self, clone=elements._clone, **kw):
        self.target = clone(self.target, **kw)
        self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        return self.target._from_objects + self.order_by._from_objects


class ExcludeConstraint(ColumnCollectionConstraint):
    """A table-level EXCLUDE constraint.

    Defines an EXCLUDE constraint as described in the `postgres
    documentation`__.

    __ http://www.postgresql.org/docs/9.0/\
static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
    """

    __visit_name__ = 'exclude_constraint'

    where = None

    def __init__(self, *elements, **kw):
        r"""
        Create an :class:`.ExcludeConstraint` object.

        E.g.::

            const = ExcludeConstraint(
                (Column('period'), '&&'),
                (Column('group'), '='),
                where=(Column('group') != 'some group')
            )

        The constraint is normally embedded into the :class:`.Table` construct
        directly, or added later using :meth:`.append_constraint`::

            some_table = Table(
                'some_table', metadata,
                Column('id', Integer, primary_key=True),
                Column('period', TSRANGE()),
                Column('group', String)
            )

            some_table.append_constraint(
                ExcludeConstraint(
                    (some_table.c.period, '&&'),
                    (some_table.c.group, '='),
                    where=some_table.c.group != 'some group',
                    name='some_table_excl_const'
                )
            )

        :param \*elements:
          A sequence of two tuples of the form ``(column, operator)`` where
          "column" is a SQL expression element or a raw SQL string, most
          typically a :class:`.Column` object,
          and "operator" is a string containing the operator to use.

          .. note::

                A plain string passed for the value of "column" is interpreted
                as an arbitrary SQL expression; when passing a plain string,
                any necessary quoting and escaping syntaxes must be applied
                manually. In order to specify a column name when a
                :class:`.Column` object is not available, while ensuring that
                any necessary quoting rules take effect, an ad-hoc
                :class:`.Column` or :func:`.sql.expression.column` object may
                be used.

        :param name:
          Optional, the in-database name of this constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param using:
          Optional string. If set, emit USING <index_method> when issuing DDL
          for this constraint. Defaults to 'gist'.

        :param where:
          Optional SQL expression construct or literal SQL string.
          If set, emit WHERE <predicate> when issuing DDL
          for this constraint.

          .. note::

                A plain string passed here is interpreted as an arbitrary SQL
                expression; when passing a plain string, any necessary quoting
                and escaping syntaxes must be applied manually.

        """
        columns = []
        render_exprs = []
        self.operators = {}

        expressions, operators = zip(*elements)

        for (expr, column, strname, add_element), operator in zip(
                self._extract_col_expression_collection(expressions),
                operators
        ):
            if add_element is not None:
                columns.append(add_element)

            name = column.name if column is not None else strname

            if name is not None:
                # backwards compat
                self.operators[name] = operator

            expr = expression._literal_as_text(expr)

            render_exprs.append(
                (expr, name, operator)
            )

        self._render_exprs = render_exprs
        ColumnCollectionConstraint.__init__(
            self,
            *columns,
            name=kw.get('name'),
            deferrable=kw.get('deferrable'),
            initially=kw.get('initially')
        )
        self.using = kw.get('using', 'gist')
        where = kw.get('where')
        if where is not None:
            self.where = expression._literal_as_text(where)

    def copy(self, **kw):
        elements = [(col, self.operators[col])
                    for col in self.columns.keys()]
        c = self.__class__(*elements,
                           name=self.name,
                           deferrable=self.deferrable,
                           initially=self.initially,
                           where=self.where,
                           using=self.using)
        c.dispatch._update(self.dispatch)
        return c


def array_agg(*arg, **kw):
    """PostgreSQL-specific form of :class:`.array_agg`, ensures
    return type is :class:`.postgresql.ARRAY` and not
    the plain :class:`.types.ARRAY`.

    .. versionadded:: 1.1

    """
    kw['type_'] = ARRAY(functions._type_from_args(arg))
    return functions.func.array_agg(*arg, **kw)
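Editor's note: a compile-only sketch of aggregate_order_by (my addition, not part of the diff; the table and column names are hypothetical):

    from sqlalchemy import (
        Table, Column, Integer, String, MetaData, select, func)
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import aggregate_order_by

    metadata = MetaData()
    t = Table('t', metadata, Column('a', String), Column('b', Integer))

    # renders: SELECT array_agg(t.a ORDER BY t.b DESC) AS array_agg_1 FROM t
    stmt = select([func.array_agg(aggregate_order_by(t.c.a, t.c.b.desc()))])
    print(stmt.compile(dialect=postgresql.dialect()))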

@ -0,0 +1,420 @@

# postgresql/hstore.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re

from .base import ischema_names
from .array import ARRAY
from ... import types as sqltypes
from ...sql import functions as sqlfunc
from ...sql import operators
from ... import util

__all__ = ('HSTORE', 'hstore')

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

GETITEM = operators.custom_op(
    "->", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_KEY = operators.custom_op(
    "?", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ALL = operators.custom_op(
    "?&", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ANY = operators.custom_op(
    "?|", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINS = operators.custom_op(
    "@>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINED_BY = operators.custom_op(
    "<@", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
    """Represent the PostgreSQL HSTORE type.

    The :class:`.HSTORE` type stores dictionaries containing strings, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', HSTORE)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data={"key1": "value1", "key2": "value2"}
            )

    :class:`.HSTORE` provides for a wide range of operations, including:

    * Index operations::

        data_table.c.data['some key'] == 'some value'

    * Containment operations::

        data_table.c.data.has_key('some key')

        data_table.c.data.has_all(['one', 'two', 'three'])

    * Concatenation::

        data_table.c.data + {"k1": "v1"}

    For a full list of special methods see
    :class:`.HSTORE.comparator_factory`.

    For usage with the SQLAlchemy ORM, it may be desirable to combine the
    usage of :class:`.HSTORE` with the :class:`.MutableDict` dictionary now
    part of the :mod:`sqlalchemy.ext.mutable`
    extension.  This extension will allow "in-place" changes to the
    dictionary, e.g. addition of new keys or replacement/removal of existing
    keys to/from the current dictionary, to produce events which will be
    detected by the unit of work::

        from sqlalchemy.ext.mutable import MutableDict

        class MyClass(Base):
            __tablename__ = 'data_table'

            id = Column(Integer, primary_key=True)
            data = Column(MutableDict.as_mutable(HSTORE))

        my_object = session.query(MyClass).one()

        # in-place mutation, requires Mutable extension
        # in order for the ORM to detect
        my_object.data['some_key'] = 'some value'

        session.commit()

    When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
    will not be alerted to any changes to the contents of an existing
    dictionary, unless that dictionary value is re-assigned to the
    HSTORE-attribute itself, thus generating a change event.

    .. versionadded:: 0.8

    .. seealso::

        :class:`.hstore` - render the PostgreSQL ``hstore()`` function.


    """

    __visit_name__ = 'HSTORE'
    hashable = False
    text_type = sqltypes.Text()

    def __init__(self, text_type=None):
        """Construct a new :class:`.HSTORE`.

        :param text_type: the type that should be used for indexed values.
         Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1.0

        """
        if text_type is not None:
            self.text_type = text_type

    class Comparator(
            sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator):
        """Define comparison operations for :class:`.HSTORE`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that
            the key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in the
            hstore.
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in the
            hstore.
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of / contain the keys of the argument hstore expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument hstore expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

        def _setup_getitem(self, index):
            return GETITEM, index, self.type.text_type

        def defined(self, key):
            """Boolean expression.  Test for presence of a non-NULL value for
            the key.  Note that the key may be a SQLA expression.
            """
            return _HStoreDefinedFunction(self.expr, key)

        def delete(self, key):
            """HStore expression.  Returns the contents of this hstore with
            the given key deleted.  Note that the key may be a SQLA
            expression.
            """
            if isinstance(key, dict):
                key = _serialize_hstore(key)
            return _HStoreDeleteFunction(self.expr, key)

        def slice(self, array):
            """HStore expression.  Returns a subset of an hstore defined by
            array of keys.
            """
            return _HStoreSliceFunction(self.expr, array)

        def keys(self):
            """Text array expression.  Returns array of keys."""
            return _HStoreKeysFunction(self.expr)

        def vals(self):
            """Text array expression.  Returns array of values."""
            return _HStoreValsFunction(self.expr)

        def array(self):
            """Text array expression.  Returns array of alternating keys and
            values.
            """
            return _HStoreArrayFunction(self.expr)

        def matrix(self):
            """Text array expression.  Returns array of [key, value] pairs."""
            return _HStoreMatrixFunction(self.expr)

    comparator_factory = Comparator

    def bind_processor(self, dialect):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value).encode(encoding)
                else:
                    return value
        else:
            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value)
                else:
                    return value
        return process

    def result_processor(self, dialect, coltype):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if value is not None:
                    return _parse_hstore(value.decode(encoding))
                else:
                    return value
        else:
            def process(value):
                if value is not None:
                    return _parse_hstore(value)
                else:
                    return value
        return process


ischema_names['hstore'] = HSTORE


class hstore(sqlfunc.GenericFunction):
    """Construct an hstore value within a SQL expression using the
    PostgreSQL ``hstore()`` function.

    The :class:`.hstore` function accepts one or two arguments as described
    in the PostgreSQL documentation.

    E.g.::

        from sqlalchemy.dialects.postgresql import array, hstore

        select([hstore('key1', 'value1')])

        select([
            hstore(
                array(['key1', 'key2', 'key3']),
                array(['value1', 'value2', 'value3'])
            )
        ])

    .. versionadded:: 0.8

    .. seealso::

        :class:`.HSTORE` - the PostgreSQL ``HSTORE`` datatype.

    """
    type = HSTORE
    name = 'hstore'


class _HStoreDefinedFunction(sqlfunc.GenericFunction):
    type = sqltypes.Boolean
    name = 'defined'


class _HStoreDeleteFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = 'delete'


class _HStoreSliceFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = 'slice'


class _HStoreKeysFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'akeys'


class _HStoreValsFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'avals'


class _HStoreArrayFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'hstore_to_array'


class _HStoreMatrixFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'hstore_to_matrix'


#
# parsing.  note that none of this is used with the psycopg2 backend,
# which provides its own native extensions.
#

# My best guess at the parsing rules of hstore literals, since no formal
# grammar is given.  This is mostly reverse engineered from PG's input parser
# behavior.
HSTORE_PAIR_RE = re.compile(r"""
(
  "(?P<key> (\\ . | [^"])* )"       # Quoted key
)
[ ]* => [ ]*    # Pair operator, optional adjoining whitespace
(
    (?P<value_null> NULL )          # NULL value
  | "(?P<value> (\\ . | [^"])* )"   # Quoted value
)
""", re.VERBOSE)

HSTORE_DELIMITER_RE = re.compile(r"""
[ ]* , [ ]*
""", re.VERBOSE)


def _parse_error(hstore_str, pos):
    """format an unmarshalling error."""

    ctx = 20
    hslen = len(hstore_str)

    parsed_tail = hstore_str[max(pos - ctx - 1, 0):min(pos, hslen)]
    residual = hstore_str[min(pos, hslen):min(pos + ctx + 1, hslen)]

    if len(parsed_tail) > ctx:
        parsed_tail = '[...]' + parsed_tail[1:]
    if len(residual) > ctx:
        residual = residual[:-1] + '[...]'

    return "After %r, could not parse residual at position %d: %r" % (
        parsed_tail, pos, residual)


def _parse_hstore(hstore_str):
    """Parse an hstore from its literal string representation.

    Attempts to approximate PG's hstore input parsing rules as closely as
    possible. Although currently this is not strictly necessary, since the
    current implementation of hstore's output syntax is stricter than what it
    accepts as input, the documentation makes no guarantees that will always
    be the case.

    """
    result = {}
    pos = 0
    pair_match = HSTORE_PAIR_RE.match(hstore_str)

    while pair_match is not None:
        key = pair_match.group('key').replace(r'\"', '"').replace(
            "\\\\", "\\")
        if pair_match.group('value_null'):
            value = None
        else:
            value = pair_match.group('value').replace(
                r'\"', '"').replace("\\\\", "\\")
        result[key] = value

        pos += pair_match.end()

        delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:])
        if delim_match is not None:
            pos += delim_match.end()

        pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:])

    if pos != len(hstore_str):
        raise ValueError(_parse_error(hstore_str, pos))

    return result


def _serialize_hstore(val):
    """Serialize a dictionary into an hstore literal.  Keys and values must
    both be strings (except None for values).

    """
    def esc(s, position):
        if position == 'value' and s is None:
            return 'NULL'
        elif isinstance(s, util.string_types):
            return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"')
        else:
            raise ValueError("%r in %s position is not a string." %
                             (s, position))

    return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
                     for k, v in val.items())
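Editor's note: a small round-trip sketch of the private helpers above (my addition, not part of the diff; psycopg2 bypasses these in favor of its native hstore extension):

    # assumes the module above is importable; both helpers are private
    raw = '"k1"=>"v1", "k2"=>NULL'
    parsed = _parse_hstore(raw)        # {'k1': 'v1', 'k2': None}
    assert _parse_hstore(_serialize_hstore(parsed)) == parsed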

@ -0,0 +1,301 @@

# postgresql/json.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import

import json
import collections

from .base import ischema_names, colspecs
from ... import types as sqltypes
from ...sql import operators
from ...sql import elements
from ... import util

__all__ = ('JSON', 'JSONB')

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

ASTEXT = operators.custom_op(
    "->>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

JSONPATH_ASTEXT = operators.custom_op(
    "#>>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


HAS_KEY = operators.custom_op(
    "?", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ALL = operators.custom_op(
    "?&", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ANY = operators.custom_op(
    "?|", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINS = operators.custom_op(
    "@>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINED_BY = operators.custom_op(
    "<@", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


class JSONPathType(sqltypes.JSON.JSONPathType):
    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            assert isinstance(value, collections.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            assert isinstance(value, collections.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process

colspecs[sqltypes.JSON.JSONPathType] = JSONPathType


class JSON(sqltypes.JSON):
    """Represent the PostgreSQL JSON type.

    This type is a specialization of the Core-level :class:`.types.JSON`
    type.  Be sure to read the documentation for :class:`.types.JSON` for
    important tips regarding treatment of NULL values and ORM use.

    .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a PostgreSQL-
       specific specialization of the new :class:`.types.JSON` type.

    The operators provided by the PostgreSQL version of :class:`.JSON`
    include:

    * Index operations (the ``->`` operator)::

        data_table.c.data['some key']

        data_table.c.data[5]

    * Index operations returning text (the ``->>`` operator)::

        data_table.c.data['some key'].astext == 'some value'

    * Index operations with CAST
      (equivalent to ``CAST(col ->> ['some key'] AS <type>)``)::

        data_table.c.data['some key'].astext.cast(Integer) == 5

    * Path index operations (the ``#>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]

    * Path index operations returning text (the ``#>>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == \
            'some value'

    .. versionchanged:: 1.1  The :meth:`.ColumnElement.cast` operator on
       JSON objects now requires that the :attr:`.JSON.Comparator.astext`
       modifier be called explicitly, if the cast works only from a textual
       string.

    Index operations return an expression object whose type defaults to
    :class:`.JSON`, so that further JSON-oriented instructions
    may be called upon the result type.

    Custom serializers and deserializers are specified at the dialect level,
    that is using :func:`.create_engine`.  The reason for this is that when
    using psycopg2, the DBAPI only allows serializers at the per-cursor
    or per-connection level.  E.g.::

        engine = create_engine(
            "postgresql://scott:tiger@localhost/test",
            json_serializer=my_serialize_fn,
            json_deserializer=my_deserialize_fn
        )

    When using the psycopg2 dialect, the json_deserializer is registered
    against the database using ``psycopg2.extras.register_default_json``.

    .. seealso::

        :class:`.types.JSON` - Core level JSON type

        :class:`.JSONB`

    """

    astext_type = sqltypes.Text()

    def __init__(self, none_as_null=False, astext_type=None):
        """Construct a :class:`.JSON` type.

        :param none_as_null: if True, persist the value ``None`` as a
         SQL NULL value, not the JSON encoding of ``null``.  Note that
         when this flag is False, the :func:`.null` construct can still
         be used to persist a NULL value::

             from sqlalchemy import null
             conn.execute(table.insert(), data=null())

         .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
            is now supported in order to persist a NULL value.

         .. seealso::

              :attr:`.JSON.NULL`

        :param astext_type: the type to use for the
         :attr:`.JSON.Comparator.astext`
         accessor on indexed attributes.  Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1

        """
        super(JSON, self).__init__(none_as_null=none_as_null)
        if astext_type is not None:
            self.astext_type = astext_type

    class Comparator(sqltypes.JSON.Comparator):
        """Define comparison operations for :class:`.JSON`."""

        @property
        def astext(self):
            """On an indexed expression, use the "astext" (e.g. "->>")
            conversion when rendered in SQL.

            E.g.::

                select([data_table.c.data['some key'].astext])

            .. seealso::

                :meth:`.ColumnElement.cast`

            """

            if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType):
                return self.expr.left.operate(
                    JSONPATH_ASTEXT,
                    self.expr.right, result_type=self.type.astext_type)
            else:
                return self.expr.left.operate(
                    ASTEXT, self.expr.right,
                    result_type=self.type.astext_type)

    comparator_factory = Comparator


colspecs[sqltypes.JSON] = JSON
ischema_names['json'] = JSON


class JSONB(JSON):
    """Represent the PostgreSQL JSONB type.

    The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', JSONB)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data={"key1": "value1", "key2": "value2"}
            )

    The :class:`.JSONB` type includes all operations provided by
    :class:`.JSON`, including the same behaviors for indexing operations.
    It also adds additional operators specific to JSONB, including
    :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`,
    :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`,
    and :meth:`.JSONB.Comparator.contained_by`.

    Like the :class:`.JSON` type, the :class:`.JSONB` type does not detect
    in-place changes when used with the ORM, unless the
    :mod:`sqlalchemy.ext.mutable` extension is used.

    Custom serializers and deserializers
    are shared with the :class:`.JSON` class, using the ``json_serializer``
    and ``json_deserializer`` keyword arguments.  These must be specified
    at the dialect level using :func:`.create_engine`.  When using
    psycopg2, the serializers are associated with the jsonb type using
    ``psycopg2.extras.register_default_jsonb`` on a per-connection basis,
    in the same way that ``psycopg2.extras.register_default_json`` is used
    to register these handlers with the json type.

    .. versionadded:: 0.9.7

    .. seealso::

        :class:`.JSON`

    """

    __visit_name__ = 'JSONB'

    class Comparator(JSON.Comparator):
        """Define comparison operations for :class:`.JSONB`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that
            the key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in jsonb.
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in jsonb.
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of / contain the keys of the argument jsonb expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument jsonb expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

    comparator_factory = Comparator

ischema_names['jsonb'] = JSONB
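Editor's note: a compile-only sketch of the astext accessor defined above (my addition, not part of the diff; the table and key names are hypothetical):

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    data_table = Table('data_table', metadata,
                       Column('data', postgresql.JSONB))

    # ->> via .astext, then a CAST applied to the text result
    expr = data_table.c.data['some key'].astext.cast(Integer) == 5
    print(expr.compile(dialect=postgresql.dialect()))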

@ -0,0 +1,295 @@

# postgresql/pg8000.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pg8000
    :name: pg8000
    :dbapi: pg8000
    :connectstring: \
postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
    :url: https://pythonhosted.org/pg8000/


.. _pg8000_unicode:

Unicode
-------

pg8000 will encode / decode string values between it and the server using the
PostgreSQL ``client_encoding`` parameter; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf-8``, as a more useful default::

    #client_encoding = sql_ascii # actually, defaults to database
                                 # encoding
    client_encoding = utf8

The ``client_encoding`` can be overridden for a session by executing the
SQL::

    SET CLIENT_ENCODING TO 'utf8';

SQLAlchemy will execute this SQL on all new connections based on the value
passed to :func:`.create_engine` using the ``client_encoding`` parameter::

    engine = create_engine(
        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')


.. _pg8000_isolation_level:

pg8000 Transaction Isolation Level
----------------------------------

The pg8000 dialect offers the same isolation level settings as that
of the :ref:`psycopg2 <psycopg2_isolation_level>` dialect:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.9.5 support for AUTOCOMMIT isolation level when using
   pg8000.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`psycopg2_isolation_level`


"""
from ... import util, exc
import decimal
from ... import processors
from ... import types as sqltypes
from .base import (
    PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext,
    _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES, UUID)
import re
from sqlalchemy.dialects.postgresql.json import JSON
from ...sql.elements import quoted_name

try:
    from uuid import UUID as _python_UUID
except ImportError:
    _python_UUID = None


class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal, self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGNumericNoBind(_PGNumeric):
    def bind_processor(self, dialect):
        return None


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._dbapi_version > (1, 10, 1):
            return None  # Has native JSON
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid:
            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value
            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid:
            def process(value):
                if value is not None:
                    value = str(value)
                return value
            return process


class PGExecutionContext_pg8000(PGExecutionContext):
    pass


class PGCompiler_pg8000(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        if '%%' in text:
            util.warn("The SQLAlchemy postgresql dialect "
                      "now automatically escapes '%' in text() "
                      "expressions to '%%'.")
        return text.replace('%', '%%')


class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')


class PGDialect_pg8000(PGDialect):
    driver = 'pg8000'

    supports_unicode_statements = True

    supports_unicode_binds = True

    default_paramstyle = 'format'
    supports_sane_multi_rowcount = True
    execution_ctx_cls = PGExecutionContext_pg8000
    statement_compiler = PGCompiler_pg8000
    preparer = PGIdentifierPreparer_pg8000
    description_encoding = 'use_encoding'

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumericNoBind,
            sqltypes.Float: _PGNumeric,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            UUID: _PGUUID
        }
    )

    def __init__(self, client_encoding=None, **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.client_encoding = client_encoding

    def initialize(self, connection):
        self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
        super(PGDialect_pg8000, self).initialize(connection)

    @util.memoized_property
    def _dbapi_version(self):
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            return tuple(
                [
                    int(x) for x in re.findall(
                        r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)])
        else:
            return (99, 99, 99)

    @classmethod
    def dbapi(cls):
        return __import__('pg8000')

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        return "connection is closed" in str(e)

    def set_isolation_level(self, connection, level):
        level = level.replace('_', ' ')

        # adjust for ConnectionFairy possibly being present
        if hasattr(connection, 'connection'):
            connection = connection.connection

        if level == 'AUTOCOMMIT':
            connection.autocommit = True
        elif level in self._isolation_lookup:
            connection.autocommit = False
            cursor = connection.cursor()
            cursor.execute(
                "SET SESSION CHARACTERISTICS AS TRANSACTION "
                "ISOLATION LEVEL %s" % level)
            cursor.execute("COMMIT")
            cursor.close()
        else:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s or AUTOCOMMIT" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

    def set_client_encoding(self, connection, client_encoding):
        # adjust for ConnectionFairy possibly being present
        if hasattr(connection, 'connection'):
            connection = connection.connection

        cursor = connection.cursor()
        cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'")
        cursor.execute("COMMIT")
        cursor.close()

    def do_begin_twophase(self, connection, xid):
        connection.connection.tpc_begin((0, xid, ''))

    def do_prepare_twophase(self, connection, xid):
        connection.connection.tpc_prepare()

    def do_rollback_twophase(
            self, connection, xid, is_prepared=True, recover=False):
        connection.connection.tpc_rollback((0, xid, ''))

    def do_commit_twophase(
            self, connection, xid, is_prepared=True, recover=False):
        connection.connection.tpc_commit((0, xid, ''))

    def do_recover_twophase(self, connection):
        return [row[1] for row in connection.connection.tpc_recover()]

    def on_connect(self):
        fns = []

        def on_connect(conn):
            conn.py_types[quoted_name] = conn.py_types[util.text_type]
        fns.append(on_connect)

        if self.client_encoding is not None:
            def on_connect(conn):
                self.set_client_encoding(conn, self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if len(fns) > 0:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

dialect = PGDialect_pg8000
|
|
@@ -0,0 +1,740 @@
# postgresql/psycopg2.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: postgresql+psycopg2
    :name: psycopg2
    :dbapi: psycopg2
    :connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/psycopg2/

psycopg2 Connect Arguments
-----------------------------------

psycopg2-specific keyword arguments which are accepted by
:func:`.create_engine()` are:

* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
  statements which support this feature. What this essentially means from a
  psycopg2 point of view is that the cursor is created using a name, e.g.
  ``connection.cursor('some name')``, which has the effect that result rows
  are not immediately pre-fetched and buffered after statement execution, but
  are instead left on the server and only retrieved as needed. SQLAlchemy's
  :class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
  behavior when this feature is enabled, such that groups of 100 rows at a
  time are fetched over the wire to reduce conversational overhead.
  Note that the :paramref:`.Connection.execution_options.stream_results`
  execution option is a more targeted way of enabling this mode on a
  per-execution basis.  A short example follows this list.

* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
  per connection.  True by default.

  .. seealso::

    :ref:`psycopg2_disable_native_unicode`

* ``isolation_level``: This option, available for all PostgreSQL dialects,
  includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
  dialect.

  .. seealso::

    :ref:`psycopg2_isolation_level`

* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
  using psycopg2's ``set_client_encoding()`` method.

  .. seealso::

    :ref:`psycopg2_unicode`

* ``use_batch_mode``: This flag allows ``psycopg2.extras.execute_batch``
  for ``cursor.executemany()`` calls performed by the :class:`.Engine`.
  It is currently experimental but may well become True by default as it
  is critical for executemany performance.

  .. seealso::

    :ref:`psycopg2_batch_mode`
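
As a brief sketch of the ``server_side_cursors`` flag described in the first
item above (the connection URL is a placeholder)::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        server_side_cursors=True)
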
Unix Domain Connections
------------------------

psycopg2 supports connecting via Unix domain connections.   When the ``host``
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
which specifies Unix-domain communication rather than TCP/IP communication::

    create_engine("postgresql+psycopg2://user:password@/dbname")

By default, the socket used is the Unix-domain socket in ``/tmp``, or
whatever socket directory was specified when PostgreSQL was built.  This
value can be overridden by passing a pathname to psycopg2,
using ``host`` as an additional keyword argument::

    create_engine("postgresql+psycopg2://user:password@/dbname?\
host=/var/lib/postgresql")

.. seealso::

    `PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_

.. _psycopg2_execution_options:

Per-Statement/Connection Execution Options
-------------------------------------------

The following DBAPI-specific options are respected when used with
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:

* ``isolation_level`` - Set the transaction isolation level for the lifespan
  of a :class:`.Connection` (can only be set on a connection, not a statement
  or query).   See :ref:`psycopg2_isolation_level`.

* ``stream_results`` - Enable or disable usage of psycopg2 server side
  cursors - this feature makes use of "named" cursors in combination with
  special result handling methods so that result rows are not fully buffered.
  If ``None`` or not set, the ``server_side_cursors`` option of the
  :class:`.Engine` is used; see the example following this list.

* ``max_row_buffer`` - when using ``stream_results``, an integer value that
  specifies the maximum number of rows to buffer at a time.  This is
  interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
  buffer will grow to ultimately store 1000 rows at a time.

  .. versionadded:: 1.0.6
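
For example, a sketch combining the two streaming options on a single
execution (``engine`` is assumed to exist; the table name is a placeholder)::

    with engine.connect() as conn:
        result = conn.execution_options(
            stream_results=True, max_row_buffer=100
        ).execute("SELECT * FROM large_table")
        for row in result:
            print(row)
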
.. _psycopg2_batch_mode:

Psycopg2 Batch Mode (Fast Execution)
------------------------------------

Modern versions of psycopg2 include a feature known as
`Fast Execution Helpers <http://initd.org/psycopg/docs/extras.html#fast-execution-helpers>`_,
which have been shown in benchmarking to improve psycopg2's executemany()
performance with INSERTS by multiple orders of magnitude.   SQLAlchemy
allows this extension to be used for all ``executemany()`` style calls
invoked by an :class:`.Engine` when used with multiple parameter sets,
by adding the ``use_batch_mode`` flag to :func:`.create_engine`::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@host/dbname",
        use_batch_mode=True)

Batch mode is considered to be **experimental** at this time, however it may
be enabled by default in a future release.

.. versionadded:: 1.2.0

.. _psycopg2_unicode:

Unicode with Psycopg2
----------------------

By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change.   Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf8``, as a more useful default::

    # postgresql.conf file

    # client_encoding = sql_ascii # actually, defaults to database
    # encoding
    client_encoding = utf8

A second way to affect the client encoding is to set it within Psycopg2
locally.   SQLAlchemy will call psycopg2's
:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::

    # set_client_encoding() setting;
    # works for *all* PostgreSQL versions
    engine = create_engine("postgresql://user:pass@host/dbname",
                           client_encoding='utf8')

This overrides the encoding specified in the PostgreSQL client configuration.
When using the parameter in this way, the psycopg2 driver emits
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
in all PostgreSQL versions.

Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
is **not the same** as the more recently added ``client_encoding`` parameter
now supported by libpq directly.   This is enabled when ``client_encoding``
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
using the :paramref:`.create_engine.connect_args` parameter::

    # libpq direct parameter setting;
    # only works for PostgreSQL **9.1 and above**
    engine = create_engine("postgresql://user:pass@host/dbname",
                           connect_args={'client_encoding': 'utf8'})

    # using the query string is equivalent
    engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")

The above parameter was only added to libpq as of version 9.1 of PostgreSQL,
so using the previous method is better for cross-version support.

.. _psycopg2_disable_native_unicode:

Disabling Native Unicode
^^^^^^^^^^^^^^^^^^^^^^^^

SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
services, which are normally reserved only for those DBAPIs that don't
fully support unicode directly.  Passing ``use_native_unicode=False`` to
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
SQLAlchemy will instead encode data itself into Python bytestrings on the way
in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
obsolete as most DBAPIs now support unicode fully.

Bound Parameter Styles
----------------------

The default parameter style for the psycopg2 dialect is "pyformat", where
SQL is rendered using ``%(paramname)s`` style.   This format has the
limitation that it does not accommodate the unusual case of parameter names
that actually contain percent or parenthesis symbols; as SQLAlchemy in many
cases generates bound parameter names based on the name of a column, the
presence of these characters in a column name can lead to problems.

There are two solutions to the issue of a :class:`.schema.Column` that
contains one of these characters in its name.  One is to specify the
:paramref:`.schema.Column.key` for columns that have such names::

    measurement = Table('measurement', metadata,
                        Column('Size (meters)', Integer, key='size_meters')
                        )

Above, an INSERT statement such as ``measurement.insert()`` will use
``size_meters`` as the parameter name, and a SQL expression such as
``measurement.c.size_meters > 10`` will derive the bound parameter name
from the ``size_meters`` key as well.

.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
   as the source of naming when anonymous bound parameters are created
   in SQL expressions; previously, this behavior only applied to
   :meth:`.Table.insert` and :meth:`.Table.update` parameter names.

The other solution is to use a positional format; psycopg2 allows use of the
"format" paramstyle, which can be passed to
:paramref:`.create_engine.paramstyle`::

    engine = create_engine(
        'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')

With the above engine, instead of a statement like::

    INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
    {'Size (meters)': 1}

we instead see::

    INSERT INTO measurement ("Size (meters)") VALUES (%s)
    (1, )

Above, the dictionary parameter style is converted into a positional tuple.


Transactions
------------

The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.

.. _psycopg2_isolation_level:

Psycopg2 Transaction Isolation Level
-------------------------------------

As discussed in :ref:`postgresql_isolation_level`,
all PostgreSQL dialects support setting of transaction isolation level
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
as well as the ``isolation_level`` argument used by
:meth:`.Connection.execution_options`.  When using the psycopg2 dialect, these
options make use of psycopg2's ``set_isolation_level()`` connection method,
rather than emitting a PostgreSQL directive; this is because psycopg2's
API-level setting is always emitted at the start of each transaction in any
case.

The psycopg2 dialect supports these constants for isolation level:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
   psycopg2.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`pg8000_isolation_level`
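
For example, a brief sketch of both styles discussed above (connection
values are placeholders)::

    # engine-wide
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="REPEATABLE READ")

    # per-connection
    with engine.connect() as conn:
        conn = conn.execution_options(isolation_level="AUTOCOMMIT")
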

NOTICE logging
---------------

The psycopg2 dialect will log PostgreSQL NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::

    import logging
    logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)

.. _psycopg2_hstore:

HSTORE type
------------

The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
the HSTORE type.   The SQLAlchemy psycopg2 dialect will enable this extension
by default when psycopg2 version 2.4 or greater is used, and
it is detected that the target database has the HSTORE type set up for use.
In other words, when the dialect makes the first
connection, a sequence like the following is performed:

1. Request the available HSTORE oids using
   ``psycopg2.extras.HstoreAdapter.get_oids()``.
   If this function returns a list of HSTORE identifiers, we then determine
   that the ``HSTORE`` extension is present.
   This function is **skipped** if the version of psycopg2 installed is
   less than version 2.4.

2. If the ``use_native_hstore`` flag is at its default of ``True``, and
   we've detected that ``HSTORE`` oids are available, the
   ``psycopg2.extensions.register_hstore()`` extension is invoked for all
   connections.

The ``register_hstore()`` extension has the effect of **all Python
dictionaries being accepted as parameters regardless of the type of target
column in SQL**.   The dictionaries are converted by this extension into a
textual HSTORE expression.  If this behavior is not desired, disable the
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
follows::

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
                           use_native_hstore=False)

The ``HSTORE`` type is **still supported** when the
``psycopg2.extensions.register_hstore()`` extension is not used.  It merely
means that the coercion between Python dictionaries and the HSTORE
string format, on both the parameter side and the result side, will take
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
which may be more performant.
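
As a usage sketch with the extension active (the URL is a placeholder; the
target database is assumed to have the ``hstore`` extension installed)::

    from sqlalchemy import Table, Column, Integer, MetaData, create_engine
    from sqlalchemy.dialects.postgresql import HSTORE

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test")
    metadata = MetaData()
    data = Table('data_table', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('data', HSTORE))
    metadata.create_all(engine)

    # with register_hstore() active, plain dicts are accepted as parameters
    engine.execute(data.insert(), {"id": 1, "data": {"k1": "v1"}})
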

"""
from __future__ import absolute_import

import re
import logging

from ... import util, exc
import decimal
from ... import processors
from ...engine import result as _result
from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
    PGIdentifierPreparer, PGExecutionContext, \
    ENUM, _DECIMAL_TYPES, _FLOAT_TYPES,\
    _INT_TYPES, UUID
from .hstore import HSTORE
from .json import JSON, JSONB

try:
    from uuid import UUID as _python_UUID
except ImportError:
    _python_UUID = None


logger = logging.getLogger('sqlalchemy.dialects.postgresql')


class _PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype):
        if util.py2k and self.convert_unicode is True:
            # we can't easily use PG's extensions here because
            # the OID is on the fly, and we need to give it a python
            # function anyway - not really worth it.
            self.convert_unicode = "force_nocheck"
        return super(_PGEnum, self).result_processor(dialect, coltype)


class _PGHStore(HSTORE):
    def bind_processor(self, dialect):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).bind_processor(dialect)

    def result_processor(self, dialect, coltype):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_json:
            return None
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGJSONB(JSONB):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_jsonb:
            return None
        else:
            return super(_PGJSONB, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid and dialect.use_native_uuid:
            nonetype = type(None)

            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value
            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid and dialect.use_native_uuid:
            def process(value):
                if value is not None:
                    value = str(value)
                return value
            return process


_server_side_id = util.counter()


class PGExecutionContext_psycopg2(PGExecutionContext):
    def create_server_side_cursor(self):
        # use server-side cursors:
        # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
        ident = "c_%s_%s" % (hex(id(self))[2:],
                             hex(_server_side_id())[2:])
        return self._dbapi_connection.cursor(ident)

    def get_result_proxy(self):
        # TODO: ouch
        if logger.isEnabledFor(logging.INFO):
            self._log_notices(self.cursor)

        if self._is_server_side:
            return _result.BufferedRowResultProxy(self)
        else:
            return _result.ResultProxy(self)

    def _log_notices(self, cursor):
        for notice in cursor.connection.notices:
            # NOTICE messages have a
            # newline character at the end
            logger.info(notice.rstrip())

        cursor.connection.notices[:] = []


class PGCompiler_psycopg2(PGCompiler):
    pass


class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
    pass


class PGDialect_psycopg2(PGDialect):
    driver = 'psycopg2'
    if util.py2k:
        supports_unicode_statements = False

    supports_server_side_cursors = True

    default_paramstyle = 'pyformat'
    # set to true based on psycopg2 version
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_psycopg2
    statement_compiler = PGCompiler_psycopg2
    preparer = PGIdentifierPreparer_psycopg2
    psycopg2_version = (0, 0)

    FEATURE_VERSION_MAP = dict(
        native_json=(2, 5),
        native_jsonb=(2, 5, 4),
        sane_multi_rowcount=(2, 0, 9),
        array_oid=(2, 4, 3),
        hstore_adapter=(2, 4)
    )

    _has_native_hstore = False
    _has_native_json = False
    _has_native_jsonb = False

    engine_config_types = PGDialect.engine_config_types.union([
        ('use_native_unicode', util.asbool),
    ])

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumeric,
            ENUM: _PGEnum,  # needs force_unicode
            sqltypes.Enum: _PGEnum,  # needs force_unicode
            HSTORE: _PGHStore,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            JSONB: _PGJSONB,
            UUID: _PGUUID
        }
    )

    def __init__(self, server_side_cursors=False, use_native_unicode=True,
                 client_encoding=None,
                 use_native_hstore=True, use_native_uuid=True,
                 use_batch_mode=False,
                 **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.server_side_cursors = server_side_cursors
        self.use_native_unicode = use_native_unicode
        self.use_native_hstore = use_native_hstore
        self.use_native_uuid = use_native_uuid
        self.supports_unicode_binds = use_native_unicode
        self.client_encoding = client_encoding
        self.psycopg2_batch_mode = use_batch_mode
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                self.psycopg2_version = tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    def initialize(self, connection):
        super(PGDialect_psycopg2, self).initialize(connection)
        self._has_native_hstore = self.use_native_hstore and \
            self._hstore_oids(connection.connection) \
            is not None
        self._has_native_json = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
        self._has_native_jsonb = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
        self.supports_sane_multi_rowcount = \
            self.psycopg2_version >= \
            self.FEATURE_VERSION_MAP['sane_multi_rowcount'] and \
            not self.psycopg2_batch_mode

    @classmethod
    def dbapi(cls):
        import psycopg2
        return psycopg2

    @classmethod
    def _psycopg2_extensions(cls):
        from psycopg2 import extensions
        return extensions

    @classmethod
    def _psycopg2_extras(cls):
        from psycopg2 import extras
        return extras

    @util.memoized_property
    def _isolation_lookup(self):
        extensions = self._psycopg2_extensions()
        return {
            'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
            'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
            'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
            'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ,
            'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE
        }

    def set_isolation_level(self, connection, level):
        try:
            level = self._isolation_lookup[level.replace('_', ' ')]
        except KeyError:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

        connection.set_isolation_level(level)

    def on_connect(self):
        extras = self._psycopg2_extras()
        extensions = self._psycopg2_extensions()

        fns = []
        if self.client_encoding is not None:
            def on_connect(conn):
                conn.set_client_encoding(self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if self.dbapi and self.use_native_uuid:
            def on_connect(conn):
                extras.register_uuid(None, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_unicode:
            def on_connect(conn):
                extensions.register_type(extensions.UNICODE, conn)
                extensions.register_type(extensions.UNICODEARRAY, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_hstore:
            def on_connect(conn):
                hstore_oids = self._hstore_oids(conn)
                if hstore_oids is not None:
                    oid, array_oid = hstore_oids
                    kw = {'oid': oid}
                    if util.py2k:
                        kw['unicode'] = True
                    if self.psycopg2_version >= \
                            self.FEATURE_VERSION_MAP['array_oid']:
                        kw['array_oid'] = array_oid
                    extras.register_hstore(conn, **kw)
            fns.append(on_connect)

        if self.dbapi and self._json_deserializer:
            def on_connect(conn):
                if self._has_native_json:
                    extras.register_default_json(
                        conn, loads=self._json_deserializer)
                if self._has_native_jsonb:
                    extras.register_default_jsonb(
                        conn, loads=self._json_deserializer)
            fns.append(on_connect)

        if fns:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

    def do_executemany(self, cursor, statement, parameters, context=None):
        if self.psycopg2_batch_mode:
            extras = self._psycopg2_extras()
            extras.execute_batch(cursor, statement, parameters)
        else:
            cursor.executemany(statement, parameters)

    @util.memoized_instancemethod
    def _hstore_oids(self, conn):
        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
            extras = self._psycopg2_extras()
            oids = extras.HstoreAdapter.get_oids(conn)
            if oids is not None and oids[0]:
                return oids[0:2]
        return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            # check the "closed" flag.  this might not be
            # present on old psycopg2 versions.   Also,
            # this flag doesn't actually help in a lot of disconnect
            # situations, so don't rely on it.
            if getattr(connection, 'closed', False):
                return True

            # checks based on strings.  in the case that .closed
            # didn't cut it, fall back onto these.
            str_e = str(e).partition("\n")[0]
            for msg in [
                # these error messages from libpq: interfaces/libpq/fe-misc.c
                # and interfaces/libpq/fe-secure.c.
                'terminating connection',
                'closed the connection',
                'connection not open',
                'could not receive data from server',
                'could not send data to server',
                # psycopg2 client errors, psycopg2/connection.h,
                # psycopg2/cursor.h
                'connection already closed',
                'cursor already closed',
                # not sure where this path is originally from, it may
                # be obsolete.   It really says "losed", not "closed".
                'losed the connection unexpectedly',
                # these can occur in newer SSL
                'connection has been closed unexpectedly',
                'SSL SYSCALL error: Bad file descriptor',
                'SSL SYSCALL error: EOF detected',
                'SSL error: decryption failed or bad record mac',
                'SSL SYSCALL error: Operation timed out',
            ]:
                idx = str_e.find(msg)
                if idx >= 0 and '"' not in str_e[:idx]:
                    return True
        return False


dialect = PGDialect_psycopg2

@@ -0,0 +1,61 @@
# postgresql/psycopg2cffi.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+psycopg2cffi
    :name: psycopg2cffi
    :dbapi: psycopg2cffi
    :connectstring: \
postgresql+psycopg2cffi://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/psycopg2cffi/

``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
layer.  This makes it suitable for use in e.g. PyPy.  Documentation
is as per ``psycopg2``.

.. versionadded:: 1.0.0

.. seealso::

    :mod:`sqlalchemy.dialects.postgresql.psycopg2`
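
Usage is otherwise identical to the psycopg2 dialect; a minimal sketch
(connection values are placeholders)::

    engine = create_engine(
        "postgresql+psycopg2cffi://scott:tiger@localhost/test")
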
"""
from .psycopg2 import PGDialect_psycopg2


class PGDialect_psycopg2cffi(PGDialect_psycopg2):
    driver = 'psycopg2cffi'
    supports_unicode_statements = True

    # psycopg2cffi's first release is 2.5.0, but reports
    # __version__ as 2.4.4.  Subsequent releases seem to have
    # fixed this.

    FEATURE_VERSION_MAP = dict(
        native_json=(2, 4, 4),
        native_jsonb=(2, 7, 1),
        sane_multi_rowcount=(2, 4, 4),
        array_oid=(2, 4, 4),
        hstore_adapter=(2, 4, 4)
    )

    @classmethod
    def dbapi(cls):
        return __import__('psycopg2cffi')

    @classmethod
    def _psycopg2_extensions(cls):
        root = __import__('psycopg2cffi', fromlist=['extensions'])
        return root.extensions

    @classmethod
    def _psycopg2_extras(cls):
        root = __import__('psycopg2cffi', fromlist=['extras'])
        return root.extras


dialect = PGDialect_psycopg2cffi

@@ -0,0 +1,243 @@
# postgresql/pygresql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pygresql
    :name: pygresql
    :dbapi: pgdb
    :connectstring: postgresql+pygresql://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://www.pygresql.org/
"""

import decimal
import re

from ... import exc, processors, util
from ...types import Numeric, JSON as Json
from ...sql.elements import Null
from .base import PGDialect, PGCompiler, PGIdentifierPreparer, \
    _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES, UUID
from .hstore import HSTORE
from .json import JSON, JSONB


class _PGNumeric(Numeric):

    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if not isinstance(coltype, int):
            coltype = coltype.oid
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # PyGreSQL returns Decimal natively for 1700 (numeric)
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # PyGreSQL returns float natively for 701 (float8)
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGHStore(HSTORE):

    def bind_processor(self, dialect):
        if not dialect.has_native_hstore:
            return super(_PGHStore, self).bind_processor(dialect)
        hstore = dialect.dbapi.Hstore

        def process(value):
            if isinstance(value, dict):
                return hstore(value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        if not dialect.has_native_hstore:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):

    def bind_processor(self, dialect):
        if not dialect.has_native_json:
            return super(_PGJSON, self).bind_processor(dialect)
        json = dialect.dbapi.Json

        def process(value):
            if value is self.NULL:
                value = None
            elif isinstance(value, Null) or (
                    value is None and self.none_as_null):
                return None
            if value is None or isinstance(value, (dict, list)):
                return json(value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        if not dialect.has_native_json:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGJSONB(JSONB):

    def bind_processor(self, dialect):
        if not dialect.has_native_json:
            return super(_PGJSONB, self).bind_processor(dialect)
        json = dialect.dbapi.Json

        def process(value):
            if value is self.NULL:
                value = None
            elif isinstance(value, Null) or (
                    value is None and self.none_as_null):
                return None
            if value is None or isinstance(value, (dict, list)):
                return json(value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        if not dialect.has_native_json:
            return super(_PGJSONB, self).result_processor(dialect, coltype)


class _PGUUID(UUID):

    def bind_processor(self, dialect):
        if not dialect.has_native_uuid:
            return super(_PGUUID, self).bind_processor(dialect)
        uuid = dialect.dbapi.Uuid

        def process(value):
            if value is None:
                return None
            if isinstance(value, (str, bytes)):
                if len(value) == 16:
                    return uuid(bytes=value)
                return uuid(value)
            if isinstance(value, int):
                return uuid(int=value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        if not dialect.has_native_uuid:
            return super(_PGUUID, self).result_processor(dialect, coltype)
        if not self.as_uuid:
            def process(value):
                if value is not None:
                    return str(value)
            return process


class _PGCompiler(PGCompiler):

    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class _PGIdentifierPreparer(PGIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')


class PGDialect_pygresql(PGDialect):

    driver = 'pygresql'

    statement_compiler = _PGCompiler
    preparer = _PGIdentifierPreparer

    @classmethod
    def dbapi(cls):
        import pgdb
        return pgdb

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            Numeric: _PGNumeric,
            HSTORE: _PGHStore,
            Json: _PGJSON,
            JSON: _PGJSON,
            JSONB: _PGJSONB,
            UUID: _PGUUID,
        }
    )

    def __init__(self, **kwargs):
        super(PGDialect_pygresql, self).__init__(**kwargs)
        try:
            version = self.dbapi.version
            m = re.match(r'(\d+)\.(\d+)', version)
            version = (int(m.group(1)), int(m.group(2)))
        except (AttributeError, ValueError, TypeError):
            version = (0, 0)
        self.dbapi_version = version
        if version < (5, 0):
            has_native_hstore = has_native_json = has_native_uuid = False
            if version != (0, 0):
                util.warn("PyGreSQL is only fully supported by SQLAlchemy"
                          " since version 5.0.")
        else:
            self.supports_unicode_statements = True
            self.supports_unicode_binds = True
            has_native_hstore = has_native_json = has_native_uuid = True
        self.has_native_hstore = has_native_hstore
        self.has_native_json = has_native_json
        self.has_native_uuid = has_native_uuid

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['host'] = '%s:%s' % (
                opts.get('host', '').rsplit(':', 1)[0], opts.pop('port'))
        opts.update(url.query)
        return [], opts

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            if not connection:
                return False
            try:
                connection = connection.connection
            except AttributeError:
                pass
            else:
                if not connection:
                    return False
            try:
                return connection.closed
            except AttributeError:  # PyGreSQL < 5.0
                return connection._cnx is None
        return False


dialect = PGDialect_pygresql

@@ -0,0 +1,97 @@
# postgresql/pypostgresql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pypostgresql
    :name: py-postgresql
    :dbapi: pypostgresql
    :connectstring: postgresql+pypostgresql://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://python.projects.pgfoundry.org/


"""
from ... import util
from ... import types as sqltypes
from .base import PGDialect, PGExecutionContext
from ... import processors


class PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return processors.to_str

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            return None
        else:
            return processors.to_float


class PGExecutionContext_pypostgresql(PGExecutionContext):
    pass


class PGDialect_pypostgresql(PGDialect):
    driver = 'pypostgresql'

    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = 'pyformat'

    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: PGNumeric,

            # prevents PGNumeric from being used
            sqltypes.Float: sqltypes.Float,
        }
    )

    @classmethod
    def dbapi(cls):
        from postgresql.driver import dbapi20
        return dbapi20

    _DBAPI_ERROR_NAMES = [
        "Error",
        "InterfaceError", "DatabaseError", "DataError",
        "OperationalError", "IntegrityError", "InternalError",
        "ProgrammingError", "NotSupportedError"
    ]

    @util.memoized_property
    def dbapi_exception_translation_map(self):
        if self.dbapi is None:
            return {}

        return dict(
            (getattr(self.dbapi, name).__name__, name)
            for name in self._DBAPI_ERROR_NAMES
        )

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        else:
            opts['port'] = 5432
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        return "connection is closed" in str(e)


dialect = PGDialect_pypostgresql

@@ -0,0 +1,172 @@
# Copyright (C) 2013-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .base import ischema_names
from ... import types as sqltypes

__all__ = ('INT4RANGE', 'INT8RANGE', 'NUMRANGE')


class RangeOperators(object):
    """
    This mixin provides functionality for the Range Operators
    listed in Table 9-44 of the `postgres documentation`__ for Range
    Functions and Operators. It is used by all the range types
    provided in the ``postgres`` dialect and can likely be used for
    any range types you create yourself.

    __ http://www.postgresql.org/docs/devel/static/functions-range.html

    No extra support is provided for the Range Functions listed in
    Table 9-45 of the postgres documentation. For these, the normal
    :func:`~sqlalchemy.sql.expression.func` object should be used.

    .. versionadded:: 0.8.2  Support for PostgreSQL RANGE operations.

    """

    class comparator_factory(sqltypes.Concatenable.Comparator):
        """Define comparison operations for range types."""

        def __ne__(self, other):
            "Boolean expression. Returns true if two ranges are not equal"
            if other is None:
                return super(
                    RangeOperators.comparator_factory, self).__ne__(other)
            else:
                return self.expr.op('<>')(other)

        def contains(self, other, **kw):
            """Boolean expression. Returns true if the right hand operand,
            which can be an element or a range, is contained within the
            column.
            """
            return self.expr.op('@>')(other)

        def contained_by(self, other):
            """Boolean expression. Returns true if the column is contained
            within the right hand operand.
            """
            return self.expr.op('<@')(other)

        def overlaps(self, other):
            """Boolean expression. Returns true if the column overlaps
            (has points in common with) the right hand operand.
            """
            return self.expr.op('&&')(other)

        def strictly_left_of(self, other):
            """Boolean expression. Returns true if the column is strictly
            left of the right hand operand.
            """
            return self.expr.op('<<')(other)

        __lshift__ = strictly_left_of

        def strictly_right_of(self, other):
            """Boolean expression. Returns true if the column is strictly
            right of the right hand operand.
            """
            return self.expr.op('>>')(other)

        __rshift__ = strictly_right_of

        def not_extend_right_of(self, other):
            """Boolean expression. Returns true if the range in the column
            does not extend right of the range in the operand.
            """
            return self.expr.op('&<')(other)

        def not_extend_left_of(self, other):
            """Boolean expression. Returns true if the range in the column
            does not extend left of the range in the operand.
            """
            return self.expr.op('&>')(other)

        def adjacent_to(self, other):
            """Boolean expression. Returns true if the range in the column
            is adjacent to the range in the operand.
            """
            return self.expr.op('-|-')(other)

        def __add__(self, other):
            """Range expression. Returns the union of the two ranges.
            Will raise an exception if the resulting range is not
            contiguous.
            """
            return self.expr.op('+')(other)


class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL INT4RANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'INT4RANGE'

ischema_names['int4range'] = INT4RANGE


class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL INT8RANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'INT8RANGE'

ischema_names['int8range'] = INT8RANGE


class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL NUMRANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'NUMRANGE'

ischema_names['numrange'] = NUMRANGE


class DATERANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL DATERANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'DATERANGE'

ischema_names['daterange'] = DATERANGE


class TSRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL TSRANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'TSRANGE'

ischema_names['tsrange'] = TSRANGE


class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
    """Represent the PostgreSQL TSTZRANGE type.

    .. versionadded:: 0.8.2

    """

    __visit_name__ = 'TSTZRANGE'

ischema_names['tstzrange'] = TSTZRANGE

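A short usage sketch for the range types above; the table and column names
are illustrative only, not part of the module::

    import datetime
    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import DATERANGE

    metadata = MetaData()
    booking = Table('booking', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('during', DATERANGE))

    # renders "booking.during @> :during_1" via the comparator above;
    # per contains(), the operand may be an element or a range
    expr = booking.c.during.contains(datetime.date(2018, 1, 1))
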
@@ -0,0 +1,46 @@
# postgresql/zxjdbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: postgresql+zxjdbc://scott:tiger@localhost/db
    :driverurl: http://jdbc.postgresql.org/


"""
from ...connectors.zxJDBC import ZxJDBCConnector
from .base import PGDialect, PGExecutionContext


class PGExecutionContext_zxjdbc(PGExecutionContext):

    def create_cursor(self):
        cursor = self._dbapi_connection.cursor()
        cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
        return cursor


class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect):
    jdbc_db_name = 'postgresql'
    jdbc_driver_name = 'org.postgresql.Driver'

    execution_ctx_cls = PGExecutionContext_zxjdbc

    supports_native_decimal = True

    def __init__(self, *args, **kwargs):
        super(PGDialect_zxjdbc, self).__init__(*args, **kwargs)
        from com.ziclix.python.sql.handler import PostgresqlDataHandler
        self.DataHandler = PostgresqlDataHandler

    def _get_server_version_info(self, connection):
        parts = connection.connection.dbversion.split('.')
        return tuple(int(x) for x in parts)

dialect = PGDialect_zxjdbc

@@ -0,0 +1,21 @@
# sqlite/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, pysqlite, pysqlcipher  # noqa

from sqlalchemy.dialects.sqlite.base import (
    BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL,
    NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR
)

# default dialect
base.dialect = dialect = pysqlite.dialect


__all__ = ('BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL',
           'FLOAT', 'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME',
           'TIMESTAMP', 'VARCHAR', 'REAL', 'dialect')

File diff suppressed because it is too large
@@ -0,0 +1,130 @@
# sqlite/pysqlcipher.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sqlite+pysqlcipher
    :name: pysqlcipher
    :dbapi: pysqlcipher
    :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
    :url: https://pypi.python.org/pypi/pysqlcipher

``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.

``pysqlcipher3`` is a fork of ``pysqlcipher`` for Python 3.  This dialect
will attempt to import it if ``pysqlcipher`` is not present.

.. versionadded:: 1.1.4 - added fallback import for pysqlcipher3

.. versionadded:: 0.9.9 - added pysqlcipher dialect

Driver
------

The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
driver, which makes use of the SQLCipher engine.  This system essentially
introduces new PRAGMA commands to SQLite which allows the setting of a
passphrase and other encryption parameters, allowing the database
file to be encrypted.

``pysqlcipher3`` is a fork of ``pysqlcipher`` with support for Python 3;
the driver is the same.

Connect Strings
---------------

The format of the connect string is in every way the same as that
of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
"password" field is now accepted, which should contain a passphrase::

    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')

For an absolute file path, two leading slashes should be used for the
database name::

    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')

A selection of additional encryption-related pragmas supported by SQLCipher
as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be
passed in the query string, and will result in that PRAGMA being called for
each new connection.  Currently, ``cipher``, ``kdf_iter``,
``cipher_page_size`` and ``cipher_use_hmac`` are supported::

    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')


Pooling Behavior
----------------

The driver makes a change to the default pool behavior of pysqlite
as described in :ref:`pysqlite_threading_pooling`.   The pysqlcipher driver
has been observed to be significantly slower on connection than the
pysqlite driver, most likely due to the encryption overhead, so the
dialect here defaults to using the :class:`.SingletonThreadPool`
implementation, instead of the :class:`.NullPool` pool used by pysqlite.
As always, the pool implementation is entirely configurable using the
:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
be more feasible for single-threaded use, or :class:`.NullPool` may be used
to prevent unencrypted connections from being held open for long periods of
time, at the expense of slower startup time for new connections.
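
For example, a sketch that opts out of the default in favor of
:class:`.NullPool` (the passphrase and path are placeholders)::

    from sqlalchemy.pool import NullPool
    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db',
                      poolclass=NullPool)
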
"""
|
||||
from __future__ import absolute_import
|
||||
from .pysqlite import SQLiteDialect_pysqlite
|
||||
from ...engine import url as _url
|
||||
from ... import pool
|
||||
|
||||
|
||||
class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
|
||||
driver = 'pysqlcipher'
|
||||
|
||||
pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
try:
|
||||
from pysqlcipher import dbapi2 as sqlcipher
|
||||
except ImportError as e:
|
||||
try:
|
||||
from pysqlcipher3 import dbapi2 as sqlcipher
|
||||
except ImportError:
|
||||
raise e
|
||||
return sqlcipher
|
||||
|
||||
@classmethod
|
||||
def get_pool_class(cls, url):
|
||||
return pool.SingletonThreadPool
|
||||
|
||||
def connect(self, *cargs, **cparams):
|
||||
passphrase = cparams.pop('passphrase', '')
|
||||
|
||||
pragmas = dict(
|
||||
(key, cparams.pop(key, None)) for key in
|
||||
self.pragmas
|
||||
)
|
||||
|
||||
conn = super(SQLiteDialect_pysqlcipher, self).\
|
||||
connect(*cargs, **cparams)
|
||||
conn.execute('pragma key="%s"' % passphrase)
|
||||
for prag, value in pragmas.items():
|
||||
if value is not None:
|
||||
conn.execute('pragma %s="%s"' % (prag, value))
|
||||
|
||||
return conn
|
||||
|
||||
def create_connect_args(self, url):
|
||||
super_url = _url.URL(
|
||||
url.drivername, username=url.username,
|
||||
host=url.host, database=url.database, query=url.query)
|
||||
c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
|
||||
create_connect_args(super_url)
|
||||
opts['passphrase'] = url.password
|
||||
return c_args, opts
|
||||
|
||||
dialect = SQLiteDialect_pysqlcipher
|
|
@@ -0,0 +1,377 @@
# sqlite/pysqlite.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: sqlite+pysqlite
    :name: pysqlite
    :dbapi: sqlite3
    :connectstring: sqlite+pysqlite:///file_path
    :url: http://docs.python.org/library/sqlite3.html

Note that ``pysqlite`` is the same driver as the ``sqlite3``
module included with the Python distribution.

Driver
------

When using Python 2.5 and above, the built-in ``sqlite3`` driver is
already installed and no additional installation is needed.  Otherwise,
the ``pysqlite2`` driver needs to be present.  This is the same driver as
``sqlite3``, just with a different name.

The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3``
is loaded.  This allows an explicitly installed pysqlite driver to take
precedence over the built-in one.  As with all dialects, a specific
DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
this explicitly::

    from sqlite3 import dbapi2 as sqlite
    e = create_engine('sqlite+pysqlite:///file.db', module=sqlite)


Connect Strings
---------------

The file specification for the SQLite database is taken as the "database"
portion of the URL.  Note that the format of a SQLAlchemy url is::

    driver://user:pass@host/database

This means that the actual filename to be used starts with the characters to
the **right** of the third slash.  So connecting to a relative filepath
looks like::

    # relative path
    e = create_engine('sqlite:///path/to/database.db')

An absolute path, which is denoted by starting with a slash, means you
need **four** slashes::

    # absolute path
    e = create_engine('sqlite:////path/to/database.db')

To use a Windows path, regular drive specifications and backslashes can be
used. Double backslashes are probably needed::

    # absolute path on Windows
    e = create_engine('sqlite:///C:\\path\\to\\database.db')

The sqlite ``:memory:`` identifier is the default if no filepath is
present.  Specify ``sqlite://`` and nothing else::

    # in-memory database
    e = create_engine('sqlite://')

Compatibility with sqlite3 "native" date and datetime types
------------------------------------------------------------

The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
sqlite3.PARSE_COLNAMES options, which have the effect that any column
or expression explicitly cast as "date" or "timestamp" will be converted
to a Python date or datetime object.  The date and datetime types provided
with the pysqlite dialect are not currently compatible with these options,
since they render the ISO date/datetime including microseconds, which
pysqlite's driver does not.  Additionally, SQLAlchemy does not at
this time automatically render the "cast" syntax required for the
freestanding functions "current_timestamp" and "current_date" to return
datetime/date types natively.  Unfortunately, pysqlite
does not provide the standard DBAPI types in ``cursor.description``,
leaving SQLAlchemy with no way to detect these types on the fly
without expensive per-row type checks.

Keeping in mind that pysqlite's parsing option is not recommended,
nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
can be forced if one configures "native_datetime=True" on create_engine()::

    engine = create_engine('sqlite://',
                           connect_args={'detect_types':
                                         sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
                           native_datetime=True
                           )

With this flag enabled, the DATE and TIMESTAMP types (but note - not the
DATETIME or TIME types... confused yet?) will not perform any bind parameter
or result processing.  Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
SQLAlchemy, so this function still receives SQLAlchemy-level result
processing.

.. _pysqlite_threading_pooling:

Threading/Pooling Behavior
---------------------------

Pysqlite's default behavior is to prohibit the usage of a single connection
in more than one thread.  This is originally intended to work with older
versions of SQLite that did not support multithreaded operation under
various circumstances.  In particular, older SQLite versions
did not allow a ``:memory:`` database to be used in multiple threads
under any circumstances.

Pysqlite does include a now-undocumented flag known as
``check_same_thread`` which will disable this check, however note that
pysqlite connections are still not safe to use concurrently in multiple
threads.  In particular, any statement execution calls would need to be
externally mutexed, as Pysqlite does not provide for thread-safe propagation
of error messages among other things.  So while even ``:memory:`` databases
can be shared among threads in modern SQLite, Pysqlite doesn't provide enough
thread-safety to make this usage worth it.

SQLAlchemy sets up pooling to work with Pysqlite's default behavior:

* When a ``:memory:`` SQLite database is specified, the dialect by default
  will use :class:`.SingletonThreadPool`.  This pool maintains a single
  connection per thread, so that all access to the engine within the current
  thread use the same ``:memory:`` database - other threads would access a
  different ``:memory:`` database.
* When a file-based database is specified, the dialect will use
  :class:`.NullPool` as the source of connections.  This pool closes and
  discards connections which are returned to the pool immediately.  SQLite
  file-based connections have extremely low overhead, so pooling is not
  necessary.  The scheme also prevents a connection from being used again in
  a different thread and works best with SQLite's coarse-grained file locking.

.. versionchanged:: 0.7
    Default selection of :class:`.NullPool` for SQLite file-based databases.
    Previous versions select :class:`.SingletonThreadPool` by
    default for all SQLite databases.


Using a Memory Database in Multiple Threads
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To use a ``:memory:`` database in a multithreaded scenario, the same
connection object must be shared among threads, since the database exists
only within the scope of that connection.  The
:class:`.StaticPool` implementation will maintain a single connection
globally, and the ``check_same_thread`` flag can be passed to Pysqlite
as ``False``::

    from sqlalchemy.pool import StaticPool
    engine = create_engine('sqlite://',
                           connect_args={'check_same_thread': False},
                           poolclass=StaticPool)

Note that using a ``:memory:`` database in multiple threads requires a recent
version of SQLite.

Using Temporary Tables with SQLite
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
the temporary table should continue to remain after :meth:`.Session.commit` or
:meth:`.Session.rollback` is called, a pool which maintains a single
connection must be used.  Use :class:`.SingletonThreadPool` if the scope is
only needed within the current thread, or :class:`.StaticPool` if the scope is
needed within multiple threads for this case::

    # maintain the same connection per thread
    from sqlalchemy.pool import SingletonThreadPool
    engine = create_engine('sqlite:///mydb.db',
                           poolclass=SingletonThreadPool)


    # maintain the same connection across all threads
    from sqlalchemy.pool import StaticPool
    engine = create_engine('sqlite:///mydb.db',
                           poolclass=StaticPool)

Note that :class:`.SingletonThreadPool` should be configured for the number
of threads that are to be used; beyond that number, connections will be
closed out in a non-deterministic way.

Unicode
-------

The pysqlite driver only returns Python ``unicode`` objects in result sets,
never plain strings, and accommodates ``unicode`` objects within bound
parameter values in all cases.  Regardless of the SQLAlchemy string type in
use, string-based result values will be Python ``unicode`` in Python 2.
The :class:`.Unicode` type should still be used to indicate those columns that
require unicode, however, so that non-``unicode`` values passed inadvertently
will emit a warning.  Pysqlite will emit an error if a non-``unicode`` string
is passed containing non-ASCII characters.

.. _pysqlite_serializable:

Serializable isolation / Savepoints / Transactional DDL
-------------------------------------------------------

In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
driver's assortment of issues that prevent several features of SQLite
from working correctly.  The pysqlite DBAPI driver has several
long-standing bugs which impact the correctness of its transactional
behavior.  In its default mode of operation, SQLite features such as
SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
non-functional, and in order to use these features, workarounds must
be taken.

The issue is essentially that the driver attempts to second-guess the user's
intent, failing to start transactions and sometimes ending them prematurely, in
an effort to minimize the SQLite database's file locking behavior, even
though SQLite itself uses "shared" locks for read-only activities.

SQLAlchemy chooses to not alter this behavior by default, as it is the
long-expected behavior of the pysqlite driver; if and when the pysqlite
driver repairs these issues, that will be more of a reason to change
SQLAlchemy's defaults.

The good news is that with a few events, we can implement transactional
support fully, by disabling pysqlite's feature entirely and emitting BEGIN
ourselves.  This is achieved using two event listeners::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite:///myfile.db")

    @event.listens_for(engine, "connect")
    def do_connect(dbapi_connection, connection_record):
        # disable pysqlite's emitting of the BEGIN statement entirely.
        # also stops it from emitting COMMIT before any DDL.
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        # emit our own BEGIN
        conn.execute("BEGIN")

Above, we intercept a new pysqlite connection and disable any transactional
integration.  Then, at the point at which SQLAlchemy knows that transaction
scope is to begin, we emit ``"BEGIN"`` ourselves.

When we take control of ``"BEGIN"``, we can also control directly SQLite's
locking modes, introduced at
`BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_,
by adding the desired locking mode to our ``"BEGIN"``::

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        conn.execute("BEGIN EXCLUSIVE")

.. seealso::

    `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_ - on the SQLite site

    `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker

    `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker

"""

from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE
from sqlalchemy import exc, pool
from sqlalchemy import types as sqltypes
from sqlalchemy import util

import os


class _SQLite_pysqliteTimeStamp(DATETIME):
    def bind_processor(self, dialect):
        if dialect.native_datetime:
            return None
        else:
            return DATETIME.bind_processor(self, dialect)

    def result_processor(self, dialect, coltype):
        if dialect.native_datetime:
            return None
        else:
            return DATETIME.result_processor(self, dialect, coltype)


class _SQLite_pysqliteDate(DATE):
    def bind_processor(self, dialect):
        if dialect.native_datetime:
            return None
        else:
            return DATE.bind_processor(self, dialect)

    def result_processor(self, dialect, coltype):
        if dialect.native_datetime:
            return None
        else:
            return DATE.result_processor(self, dialect, coltype)


class SQLiteDialect_pysqlite(SQLiteDialect):
    default_paramstyle = 'qmark'

    colspecs = util.update_copy(
        SQLiteDialect.colspecs,
        {
            sqltypes.Date: _SQLite_pysqliteDate,
            sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp,
        }
    )

    if not util.py2k:
        description_encoding = None

    driver = 'pysqlite'

    def __init__(self, **kwargs):
        SQLiteDialect.__init__(self, **kwargs)

        if self.dbapi is not None:
            sqlite_ver = self.dbapi.version_info
            if sqlite_ver < (2, 1, 3):
                util.warn(
                    ("The installed version of pysqlite2 (%s) is out-dated "
                     "and will cause errors in some cases.  Version 2.1.3 "
                     "or greater is recommended.") %
                    '.'.join([str(subver) for subver in sqlite_ver]))

    @classmethod
    def dbapi(cls):
        try:
            from pysqlite2 import dbapi2 as sqlite
        except ImportError:
            try:
                from sqlite3 import dbapi2 as sqlite  # try 2.5+ stdlib name.
            except ImportError as e:
                raise e
        return sqlite

    @classmethod
    def get_pool_class(cls, url):
        if url.database and url.database != ':memory:':
            return pool.NullPool
        else:
            return pool.SingletonThreadPool

    def _get_server_version_info(self, connection):
        return self.dbapi.sqlite_version_info

    def create_connect_args(self, url):
        if url.username or url.password or url.host or url.port:
            raise exc.ArgumentError(
                "Invalid SQLite URL: %s\n"
                "Valid SQLite URL forms are:\n"
                " sqlite:///:memory: (or, sqlite://)\n"
                " sqlite:///relative/path/to/file.db\n"
                " sqlite:////absolute/path/to/file.db" % (url,))
        filename = url.database or ':memory:'
        if filename != ':memory:':
            filename = os.path.abspath(filename)

        opts = url.query.copy()
        util.coerce_kw_type(opts, 'timeout', float)
        util.coerce_kw_type(opts, 'isolation_level', str)
        util.coerce_kw_type(opts, 'detect_types', int)
        util.coerce_kw_type(opts, 'check_same_thread', bool)
        util.coerce_kw_type(opts, 'cached_statements', int)

        return ([filename], opts)

    def is_disconnect(self, e, connection, cursor):
        return isinstance(e, self.dbapi.ProgrammingError) and \
            "Cannot operate on a closed database." in str(e)

dialect = SQLiteDialect_pysqlite
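
The ``create_connect_args()`` above forwards URL query-string entries to
``sqlite3.connect()`` after coercing them to the expected types, so DBAPI
options can be carried on the URL itself.  A minimal sketch; the ``app.db``
filename and the option values here are illustrative only::

    from sqlalchemy import create_engine

    # 'timeout=30' is coerced to the float 30.0, and
    # 'check_same_thread=false' to the boolean False, by coerce_kw_type()
    engine = create_engine(
        'sqlite:///app.db?timeout=30&check_same_thread=false')
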
@@ -0,0 +1,27 @@
# sybase/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, pysybase, pyodbc  # noqa

from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
    TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
    BIGINT, INT, INTEGER, SMALLINT, BINARY,\
    VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
    IMAGE, BIT, MONEY, SMALLMONEY, TINYINT

# default dialect
base.dialect = dialect = pyodbc.dialect


__all__ = (
    'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR',
    'TEXT', 'DATE', 'DATETIME', 'FLOAT', 'NUMERIC',
    'BIGINT', 'INT', 'INTEGER', 'SMALLINT', 'BINARY',
    'VARBINARY', 'UNITEXT', 'UNICHAR', 'UNIVARCHAR',
    'IMAGE', 'BIT', 'MONEY', 'SMALLMONEY', 'TINYINT',
    'dialect'
)
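
Because ``base.dialect`` is pointed at ``pyodbc.dialect`` above, a URL with no
explicit ``+driver`` suffix resolves to the pyodbc DBAPI.  A minimal sketch,
assuming a hypothetical ODBC DSN named ``mydsn``::

    from sqlalchemy import create_engine

    # equivalent to 'sybase+pyodbc://...' given the default set above
    engine = create_engine('sybase://user:pass@mydsn')
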
@@ -0,0 +1,839 @@
# sybase/base.py
# Copyright (C) 2010-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: sybase
    :name: Sybase

.. note::

    The Sybase dialect functions on current SQLAlchemy versions
    but is not regularly tested, and may have many issues and
    caveats not currently handled.

"""
import operator
import re

from sqlalchemy.sql import compiler, expression, text, bindparam
from sqlalchemy.engine import default, base, reflection
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import schema as sa_schema
from sqlalchemy import util, sql, exc

from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
    TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
    BIGINT, INT, INTEGER, SMALLINT, BINARY,\
    VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
    UnicodeText, REAL

RESERVED_WORDS = set([
    "add", "all", "alter", "and",
    "any", "as", "asc", "backup",
    "begin", "between", "bigint", "binary",
    "bit", "bottom", "break", "by",
    "call", "capability", "cascade", "case",
    "cast", "char", "char_convert", "character",
    "check", "checkpoint", "close", "comment",
    "commit", "connect", "constraint", "contains",
    "continue", "convert", "create", "cross",
    "cube", "current", "current_timestamp", "current_user",
    "cursor", "date", "dbspace", "deallocate",
    "dec", "decimal", "declare", "default",
    "delete", "deleting", "desc", "distinct",
    "do", "double", "drop", "dynamic",
    "else", "elseif", "encrypted", "end",
    "endif", "escape", "except", "exception",
    "exec", "execute", "existing", "exists",
    "externlogin", "fetch", "first", "float",
    "for", "force", "foreign", "forward",
    "from", "full", "goto", "grant",
    "group", "having", "holdlock", "identified",
    "if", "in", "index", "index_lparen",
    "inner", "inout", "insensitive", "insert",
    "inserting", "install", "instead", "int",
    "integer", "integrated", "intersect", "into",
    "iq", "is", "isolation", "join",
    "key", "lateral", "left", "like",
    "lock", "login", "long", "match",
    "membership", "message", "mode", "modify",
    "natural", "new", "no", "noholdlock",
    "not", "notify", "null", "numeric",
    "of", "off", "on", "open",
    "option", "options", "or", "order",
    "others", "out", "outer", "over",
    "passthrough", "precision", "prepare", "primary",
    "print", "privileges", "proc", "procedure",
    "publication", "raiserror", "readtext", "real",
    "reference", "references", "release", "remote",
    "remove", "rename", "reorganize", "resource",
    "restore", "restrict", "return", "revoke",
    "right", "rollback", "rollup", "save",
    "savepoint", "scroll", "select", "sensitive",
    "session", "set", "setuser", "share",
    "smallint", "some", "sqlcode", "sqlstate",
    "start", "stop", "subtrans", "subtransaction",
    "synchronize", "syntax_error", "table", "temporary",
    "then", "time", "timestamp", "tinyint",
    "to", "top", "tran", "trigger",
    "truncate", "tsequal", "unbounded", "union",
    "unique", "unknown", "unsigned", "update",
    "updating", "user", "using", "validate",
    "values", "varbinary", "varchar", "variable",
    "varying", "view", "wait", "waitfor",
    "when", "where", "while", "window",
    "with", "with_cube", "with_lparen", "with_rollup",
    "within", "work", "writetext",
])


class _SybaseUnitypeMixin(object):
    """these types appear to return a buffer object."""

    def result_processor(self, dialect, coltype):
        def process(value):
            if value is not None:
                return str(value)  # decode("ucs-2")
            else:
                return None
        return process


class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = 'UNICHAR'


class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = 'UNIVARCHAR'


class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
    __visit_name__ = 'UNITEXT'


class TINYINT(sqltypes.Integer):
    __visit_name__ = 'TINYINT'


class BIT(sqltypes.TypeEngine):
    __visit_name__ = 'BIT'


class MONEY(sqltypes.TypeEngine):
    __visit_name__ = "MONEY"


class SMALLMONEY(sqltypes.TypeEngine):
    __visit_name__ = "SMALLMONEY"


class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
    __visit_name__ = "UNIQUEIDENTIFIER"


class IMAGE(sqltypes.LargeBinary):
    __visit_name__ = 'IMAGE'


class SybaseTypeCompiler(compiler.GenericTypeCompiler):
    def visit_large_binary(self, type_, **kw):
        return self.visit_IMAGE(type_)

    def visit_boolean(self, type_, **kw):
        return self.visit_BIT(type_)

    def visit_unicode(self, type_, **kw):
        return self.visit_NVARCHAR(type_)

    def visit_UNICHAR(self, type_, **kw):
        return "UNICHAR(%d)" % type_.length

    def visit_UNIVARCHAR(self, type_, **kw):
        return "UNIVARCHAR(%d)" % type_.length

    def visit_UNITEXT(self, type_, **kw):
        return "UNITEXT"

    def visit_TINYINT(self, type_, **kw):
        return "TINYINT"

    def visit_IMAGE(self, type_, **kw):
        return "IMAGE"

    def visit_BIT(self, type_, **kw):
        return "BIT"

    def visit_MONEY(self, type_, **kw):
        return "MONEY"

    def visit_SMALLMONEY(self, type_, **kw):
        return "SMALLMONEY"

    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
        return "UNIQUEIDENTIFIER"

ischema_names = {
    'bigint': BIGINT,
    'int': INTEGER,
    'integer': INTEGER,
    'smallint': SMALLINT,
    'tinyint': TINYINT,
    'unsigned bigint': BIGINT,  # TODO: unsigned flags
    'unsigned int': INTEGER,  # TODO: unsigned flags
    'unsigned smallint': SMALLINT,  # TODO: unsigned flags
    'numeric': NUMERIC,
    'decimal': DECIMAL,
    'dec': DECIMAL,
    'float': FLOAT,
    'double': NUMERIC,  # TODO
    'double precision': NUMERIC,  # TODO
    'real': REAL,
    'smallmoney': SMALLMONEY,
    'money': MONEY,
    'smalldatetime': DATETIME,
    'datetime': DATETIME,
    'date': DATE,
    'time': TIME,
    'char': CHAR,
    'character': CHAR,
    'varchar': VARCHAR,
    'character varying': VARCHAR,
    'char varying': VARCHAR,
    'unichar': UNICHAR,
    'unicode character': UNIVARCHAR,
    'nchar': NCHAR,
    'national char': NCHAR,
    'national character': NCHAR,
    'nvarchar': NVARCHAR,
    'nchar varying': NVARCHAR,
    'national char varying': NVARCHAR,
    'national character varying': NVARCHAR,
    'text': TEXT,
    'unitext': UNITEXT,
    'binary': BINARY,
    'varbinary': VARBINARY,
    'image': IMAGE,
    'bit': BIT,

    # not in documentation for ASE 15.7
    'long varchar': TEXT,  # TODO
    'timestamp': TIMESTAMP,
    'uniqueidentifier': UNIQUEIDENTIFIER,

}


class SybaseInspector(reflection.Inspector):

    def __init__(self, conn):
        reflection.Inspector.__init__(self, conn)

    def get_table_id(self, table_name, schema=None):
        """Return the table id from `table_name` and `schema`."""

        return self.dialect.get_table_id(self.bind, table_name, schema,
                                         info_cache=self.info_cache)


class SybaseExecutionContext(default.DefaultExecutionContext):
    _enable_identity_insert = False

    def set_ddl_autocommit(self, connection, value):
        """Must be implemented by subclasses to accommodate DDL executions.

        "connection" is the raw unwrapped DBAPI connection.  "value" is
        True or False.  When True, the connection should be configured
        such that a DDL can take place subsequently.  When False,
        a DDL has taken place and the connection should be resumed
        into non-autocommit mode.

        """
        raise NotImplementedError()

    def pre_exec(self):
        if self.isinsert:
            tbl = self.compiled.statement.table
            seq_column = tbl._autoincrement_column
            insert_has_sequence = seq_column is not None

            if insert_has_sequence:
                self._enable_identity_insert = \
                    seq_column.key in self.compiled_parameters[0]
            else:
                self._enable_identity_insert = False

            if self._enable_identity_insert:
                self.cursor.execute(
                    "SET IDENTITY_INSERT %s ON" %
                    self.dialect.identifier_preparer.format_table(tbl))

        if self.isddl:
            # TODO: to enhance this, we can detect "ddl in tran" on the
            # database settings.  this error message should be improved to
            # include a note about that.
            if not self.should_autocommit:
                raise exc.InvalidRequestError(
                    "The Sybase dialect only supports "
                    "DDL in 'autocommit' mode at this time.")

            self.root_connection.engine.logger.info(
                "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")

            self.set_ddl_autocommit(
                self.root_connection.connection.connection,
                True)

    def post_exec(self):
        if self.isddl:
            self.set_ddl_autocommit(self.root_connection, False)

        if self._enable_identity_insert:
            self.cursor.execute(
                "SET IDENTITY_INSERT %s OFF" %
                self.dialect.identifier_preparer.
                format_table(self.compiled.statement.table)
            )

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT @@identity AS lastrowid")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid


class SybaseSQLCompiler(compiler.SQLCompiler):
    ansi_bind_rules = True

    extract_map = util.update_copy(
        compiler.SQLCompiler.extract_map,
        {
            'doy': 'dayofyear',
            'dow': 'weekday',
            'milliseconds': 'millisecond'
        })

    def get_select_precolumns(self, select, **kw):
        s = select._distinct and "DISTINCT " or ""
        # TODO: don't think Sybase supports
        # bind params for FIRST / TOP
        limit = select._limit
        if limit:
            # if select._limit == 1:
            #     s += "FIRST "
            # else:
            #     s += "TOP %s " % (select._limit,)
            s += "TOP %s " % (limit,)
        offset = select._offset
        if offset:
            raise NotImplementedError("Sybase ASE does not support OFFSET")
        return s

    def get_from_hint_text(self, table, text):
        return text

    def limit_clause(self, select, **kw):
        # Limit in sybase is after the select keyword
        return ""

    def visit_extract(self, extract, **kw):
        field = self.extract_map.get(extract.field, extract.field)
        return 'DATEPART("%s", %s)' % (
            field, self.process(extract.expr, **kw))

    def visit_now_func(self, fn, **kw):
        return "GETDATE()"

    def for_update_clause(self, select):
        # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
        # which SQLAlchemy doesn't use
        return ''

    def order_by_clause(self, select, **kw):
        kw['literal_binds'] = True
        order_by = self.process(select._order_by_clause, **kw)

        # SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT
        if order_by and (not self.is_subquery() or select._limit):
            return " ORDER BY " + order_by
        else:
            return ""

    def delete_table_clause(self, delete_stmt, from_table,
                            extra_froms):
        """If we have extra froms make sure we render any alias as hint."""
        ashint = False
        if extra_froms:
            ashint = True
        return from_table._compiler_dispatch(
            self, asfrom=True, iscrud=True, ashint=ashint
        )

    def delete_extra_from_clause(self, delete_stmt, from_table,
                                 extra_froms, from_hints, **kw):
        """Render the DELETE .. FROM clause specific to Sybase."""
        return "FROM " + ', '.join(
            t._compiler_dispatch(self, asfrom=True,
                                 fromhints=from_hints, **kw)
            for t in [from_table] + extra_froms)


class SybaseDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column) + " " + \
            self.dialect.type_compiler.process(
                column.type, type_expression=column)

        if column.table is None:
            raise exc.CompileError(
                "The Sybase dialect requires Table-bound "
                "columns in order to generate DDL")
        seq_col = column.table._autoincrement_column

        # install an IDENTITY Sequence if we have an implicit IDENTITY column
        if seq_col is column:
            sequence = isinstance(column.default, sa_schema.Sequence) \
                and column.default
            if sequence:
                start, increment = sequence.start or 1, \
                    sequence.increment or 1
            else:
                start, increment = 1, 1
            if (start, increment) == (1, 1):
                colspec += " IDENTITY"
            else:
                # TODO: need correct syntax for this
                colspec += " IDENTITY(%s,%s)" % (start, increment)
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        if column.nullable is not None:
            if not column.nullable or column.primary_key:
                colspec += " NOT NULL"
            else:
                colspec += " NULL"

        return colspec

    def visit_drop_index(self, drop):
        index = drop.element
        return "\nDROP INDEX %s.%s" % (
            self.preparer.quote_identifier(index.table.name),
            self._prepared_index_name(drop.element,
                                      include_schema=False)
        )


class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words = RESERVED_WORDS


class SybaseDialect(default.DefaultDialect):
    name = 'sybase'
    supports_unicode_statements = False
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    supports_native_boolean = False
    supports_unicode_binds = False
    postfetch_lastrowid = True

    colspecs = {}
    ischema_names = ischema_names

    type_compiler = SybaseTypeCompiler
    statement_compiler = SybaseSQLCompiler
    ddl_compiler = SybaseDDLCompiler
    preparer = SybaseIdentifierPreparer
    inspector = SybaseInspector

    construct_arguments = []

    def _get_default_schema_name(self, connection):
        return connection.scalar(
            text("SELECT user_name() as user_name",
                 typemap={'user_name': Unicode})
        )

    def initialize(self, connection):
        super(SybaseDialect, self).initialize(connection)
        if self.server_version_info is not None and\
                self.server_version_info < (15, ):
            self.max_identifier_length = 30
        else:
            self.max_identifier_length = 255

    def get_table_id(self, connection, table_name, schema=None, **kw):
        """Fetch the id for schema.table_name.

        Several reflection methods require the table id.  The idea for using
        this method is that it can be fetched one time and cached for
        subsequent calls.

        """

        table_id = None
        if schema is None:
            schema = self.default_schema_name

        TABLEID_SQL = text("""
          SELECT o.id AS id
          FROM sysobjects o JOIN sysusers u ON o.uid=u.uid
          WHERE u.name = :schema_name
              AND o.name = :table_name
              AND o.type in ('U', 'V')
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")
            if isinstance(table_name, unicode):
                table_name = table_name.encode("ascii")
        result = connection.execute(TABLEID_SQL,
                                    schema_name=schema,
                                    table_name=table_name)
        table_id = result.scalar()
        if table_id is None:
            raise exc.NoSuchTableError(table_name)
        return table_id

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        COLUMN_SQL = text("""
          SELECT col.name AS name,
                 t.name AS type,
                 (col.status & 8) AS nullable,
                 (col.status & 128) AS autoincrement,
                 com.text AS 'default',
                 col.prec AS precision,
                 col.scale AS scale,
                 col.length AS length
          FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON
              col.cdefault = com.id
          WHERE col.usertype = t.usertype
              AND col.id = :table_id
          ORDER BY col.colid
        """)

        results = connection.execute(COLUMN_SQL, table_id=table_id)

        columns = []
        for (name, type_, nullable, autoincrement, default, precision, scale,
             length) in results:
            col_info = self._get_column_info(name, type_, bool(nullable),
                                             bool(autoincrement),
                                             default, precision, scale,
                                             length)
            columns.append(col_info)

        return columns

    def _get_column_info(self, name, type_, nullable, autoincrement, default,
                         precision, scale, length):

        coltype = self.ischema_names.get(type_, None)

        kwargs = {}

        if coltype in (NUMERIC, DECIMAL):
            args = (precision, scale)
        elif coltype == FLOAT:
            args = (precision,)
        elif coltype in (CHAR, VARCHAR, UNICHAR, UNIVARCHAR, NCHAR, NVARCHAR):
            args = (length,)
        else:
            args = ()

        if coltype:
            coltype = coltype(*args, **kwargs)
            # is this necessary
            # if is_array:
            #     coltype = ARRAY(coltype)
        else:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (type_, name))
            coltype = sqltypes.NULLTYPE

        if default:
            default = default.replace("DEFAULT", "").strip()
            default = re.sub("^'(.*)'$", lambda m: m.group(1), default)
        else:
            default = None

        column_info = dict(name=name, type=coltype, nullable=nullable,
                           default=default, autoincrement=autoincrement)
        return column_info

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):

        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        table_cache = {}
        column_cache = {}
        foreign_keys = []

        table_cache[table_id] = {"name": table_name, "schema": schema}

        COLUMN_SQL = text("""
          SELECT c.colid AS id, c.name AS name
          FROM syscolumns c
          WHERE c.id = :table_id
        """)

        results = connection.execute(COLUMN_SQL, table_id=table_id)
        columns = {}
        for col in results:
            columns[col["id"]] = col["name"]
        column_cache[table_id] = columns

        REFCONSTRAINT_SQL = text("""
          SELECT o.name AS name, r.reftabid AS reftable_id,
            r.keycnt AS 'count',
            r.fokey1 AS fokey1, r.fokey2 AS fokey2, r.fokey3 AS fokey3,
            r.fokey4 AS fokey4, r.fokey5 AS fokey5, r.fokey6 AS fokey6,
            r.fokey7 AS fokey7, r.fokey8 AS fokey8, r.fokey9 AS fokey9,
            r.fokey10 AS fokey10, r.fokey11 AS fokey11, r.fokey12 AS fokey12,
            r.fokey13 AS fokey13, r.fokey14 AS fokey14, r.fokey15 AS fokey15,
            r.fokey16 AS fokey16,
            r.refkey1 AS refkey1, r.refkey2 AS refkey2, r.refkey3 AS refkey3,
            r.refkey4 AS refkey4, r.refkey5 AS refkey5, r.refkey6 AS refkey6,
            r.refkey7 AS refkey7, r.refkey8 AS refkey8, r.refkey9 AS refkey9,
            r.refkey10 AS refkey10, r.refkey11 AS refkey11,
            r.refkey12 AS refkey12, r.refkey13 AS refkey13,
            r.refkey14 AS refkey14, r.refkey15 AS refkey15,
            r.refkey16 AS refkey16
          FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
          WHERE r.tableid = :table_id
        """)
        referential_constraints = connection.execute(
            REFCONSTRAINT_SQL, table_id=table_id).fetchall()

        REFTABLE_SQL = text("""
          SELECT o.name AS name, u.name AS 'schema'
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE o.id = :table_id
        """)

        for r in referential_constraints:
            reftable_id = r["reftable_id"]

            if reftable_id not in table_cache:
                c = connection.execute(REFTABLE_SQL, table_id=reftable_id)
                reftable = c.fetchone()
                c.close()
                table_info = {"name": reftable["name"], "schema": None}
                if (schema is not None or
                        reftable["schema"] != self.default_schema_name):
                    table_info["schema"] = reftable["schema"]

                table_cache[reftable_id] = table_info
                results = connection.execute(COLUMN_SQL, table_id=reftable_id)
                reftable_columns = {}
                for col in results:
                    reftable_columns[col["id"]] = col["name"]
                column_cache[reftable_id] = reftable_columns

            reftable = table_cache[reftable_id]
            reftable_columns = column_cache[reftable_id]

            constrained_columns = []
            referred_columns = []
            for i in range(1, r["count"] + 1):
                constrained_columns.append(columns[r["fokey%i" % i]])
                referred_columns.append(reftable_columns[r["refkey%i" % i]])

            fk_info = {
                "constrained_columns": constrained_columns,
                "referred_schema": reftable["schema"],
                "referred_table": reftable["name"],
                "referred_columns": referred_columns,
                "name": r["name"]
            }

            foreign_keys.append(fk_info)

        return foreign_keys

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        INDEX_SQL = text("""
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 (i.status & 0x2) AS 'unique',
                 index_col(object_name(i.id), i.indid, 1) AS col_1,
                 index_col(object_name(i.id), i.indid, 2) AS col_2,
                 index_col(object_name(i.id), i.indid, 3) AS col_3,
                 index_col(object_name(i.id), i.indid, 4) AS col_4,
                 index_col(object_name(i.id), i.indid, 5) AS col_5,
                 index_col(object_name(i.id), i.indid, 6) AS col_6,
                 index_col(object_name(i.id), i.indid, 7) AS col_7,
                 index_col(object_name(i.id), i.indid, 8) AS col_8,
                 index_col(object_name(i.id), i.indid, 9) AS col_9,
                 index_col(object_name(i.id), i.indid, 10) AS col_10,
                 index_col(object_name(i.id), i.indid, 11) AS col_11,
                 index_col(object_name(i.id), i.indid, 12) AS col_12,
                 index_col(object_name(i.id), i.indid, 13) AS col_13,
                 index_col(object_name(i.id), i.indid, 14) AS col_14,
                 index_col(object_name(i.id), i.indid, 15) AS col_15,
                 index_col(object_name(i.id), i.indid, 16) AS col_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
            AND o.id = :table_id
            AND (i.status & 2048) = 0
            AND i.indid BETWEEN 1 AND 254
        """)

        results = connection.execute(INDEX_SQL, table_id=table_id)
        indexes = []
        for r in results:
            column_names = []
            for i in range(1, r["count"]):
                column_names.append(r["col_%i" % (i,)])
            index_info = {"name": r["name"],
                          "unique": bool(r["unique"]),
                          "column_names": column_names}
            indexes.append(index_info)

        return indexes

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        PK_SQL = text("""
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 index_col(object_name(i.id), i.indid, 1) AS pk_1,
                 index_col(object_name(i.id), i.indid, 2) AS pk_2,
                 index_col(object_name(i.id), i.indid, 3) AS pk_3,
                 index_col(object_name(i.id), i.indid, 4) AS pk_4,
                 index_col(object_name(i.id), i.indid, 5) AS pk_5,
                 index_col(object_name(i.id), i.indid, 6) AS pk_6,
                 index_col(object_name(i.id), i.indid, 7) AS pk_7,
                 index_col(object_name(i.id), i.indid, 8) AS pk_8,
                 index_col(object_name(i.id), i.indid, 9) AS pk_9,
                 index_col(object_name(i.id), i.indid, 10) AS pk_10,
                 index_col(object_name(i.id), i.indid, 11) AS pk_11,
                 index_col(object_name(i.id), i.indid, 12) AS pk_12,
                 index_col(object_name(i.id), i.indid, 13) AS pk_13,
                 index_col(object_name(i.id), i.indid, 14) AS pk_14,
                 index_col(object_name(i.id), i.indid, 15) AS pk_15,
                 index_col(object_name(i.id), i.indid, 16) AS pk_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
            AND o.id = :table_id
            AND (i.status & 2048) = 2048
            AND i.indid BETWEEN 1 AND 254
        """)

        results = connection.execute(PK_SQL, table_id=table_id)
        pks = results.fetchone()
        results.close()

        constrained_columns = []
        if pks:
            for i in range(1, pks["count"] + 1):
                constrained_columns.append(pks["pk_%i" % (i,)])
            return {"constrained_columns": constrained_columns,
                    "name": pks["name"]}
        else:
            return {"constrained_columns": [], "name": None}

    @reflection.cache
    def get_schema_names(self, connection, **kw):

        SCHEMA_SQL = text("SELECT u.name AS name FROM sysusers u")

        schemas = connection.execute(SCHEMA_SQL)

        return [s["name"] for s in schemas]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        TABLE_SQL = text("""
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
            AND o.type = 'U'
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")

        tables = connection.execute(TABLE_SQL, schema_name=schema)

        return [t["name"] for t in tables]

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_DEF_SQL = text("""
          SELECT c.text
          FROM syscomments c JOIN sysobjects o ON c.id = o.id
          WHERE o.name = :view_name
            AND o.type = 'V'
        """)

        if util.py2k:
            if isinstance(view_name, unicode):
                view_name = view_name.encode("ascii")

        view = connection.execute(VIEW_DEF_SQL, view_name=view_name)

        return view.scalar()

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_SQL = text("""
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
            AND o.type = 'V'
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")
        views = connection.execute(VIEW_SQL, schema_name=schema)

        return [v["name"] for v in views]

    def has_table(self, connection, table_name, schema=None):
        try:
            self.get_table_id(connection, table_name, schema)
        except exc.NoSuchTableError:
            return False
        else:
            return True
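
The reflection queries above surface through SQLAlchemy's generic inspection
API; calling ``inspect()`` on an engine bound to this dialect returns the
``SybaseInspector`` declared earlier.  A minimal sketch, assuming a reachable
server and a hypothetical ``employees`` table::

    from sqlalchemy import create_engine, inspect

    engine = create_engine('sybase+pyodbc://user:pass@mydsn')
    insp = inspect(engine)

    print(insp.get_table_names())               # runs TABLE_SQL over sysobjects
    print(insp.get_columns('employees'))        # runs COLUMN_SQL
    print(insp.get_pk_constraint('employees'))  # runs PK_SQL
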
@@ -0,0 +1,33 @@
# sybase/mxodbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""

.. dialect:: sybase+mxodbc
    :name: mxODBC
    :dbapi: mxodbc
    :connectstring: sybase+mxodbc://<username>:<password>@<dsnname>
    :url: http://www.egenix.com/

.. note::

    This dialect is a stub only and is likely non-functional at this time.


"""
from sqlalchemy.dialects.sybase.base import SybaseDialect
from sqlalchemy.dialects.sybase.base import SybaseExecutionContext
from sqlalchemy.connectors.mxodbc import MxODBCConnector


class SybaseExecutionContext_mxodbc(SybaseExecutionContext):
    pass


class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
    execution_ctx_cls = SybaseExecutionContext_mxodbc

dialect = SybaseDialect_mxodbc
@@ -0,0 +1,86 @@
# sybase/pyodbc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sybase+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: sybase+pyodbc://<username>:<password>@<dsnname>\
[/<database>]
    :url: http://pypi.python.org/pypi/pyodbc/


Unicode Support
---------------

The pyodbc driver currently supports usage of these Sybase types with
Unicode or multibyte strings::

    CHAR
    NCHAR
    NVARCHAR
    TEXT
    VARCHAR

Currently *not* supported are::

    UNICHAR
    UNITEXT
    UNIVARCHAR

"""

from sqlalchemy.dialects.sybase.base import SybaseDialect,\
    SybaseExecutionContext
from sqlalchemy.connectors.pyodbc import PyODBCConnector
from sqlalchemy import types as sqltypes, processors
import decimal


class _SybNumeric_pyodbc(sqltypes.Numeric):
    """Turns Decimals with adjusted() < -6 into floats.

    It's not yet known how to get decimals with many
    significant digits or very large adjusted() into Sybase
    via pyodbc.

    """

    def bind_processor(self, dialect):
        super_process = super(_SybNumeric_pyodbc, self).\
            bind_processor(dialect)

        def process(value):
            if self.asdecimal and \
                    isinstance(value, decimal.Decimal):

                if value.adjusted() < -6:
                    return processors.to_float(value)

            if super_process:
                return super_process(value)
            else:
                return value
        return process


class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
    def set_ddl_autocommit(self, connection, value):
        if value:
            connection.autocommit = True
        else:
            connection.autocommit = False


class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
    execution_ctx_cls = SybaseExecutionContext_pyodbc

    colspecs = {
        sqltypes.Numeric: _SybNumeric_pyodbc,
    }

dialect = SybaseDialect_pyodbc
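
``Decimal.adjusted()`` returns the exponent of the most significant digit, so
the ``< -6`` cutoff above catches values below roughly ``1e-6``.  A short,
runnable illustration of the threshold::

    from decimal import Decimal

    print(Decimal('0.001').adjusted())      # -3: passed through as a Decimal
    print(Decimal('0.0000001').adjusted())  # -7: converted with to_float()
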
@@ -0,0 +1,102 @@
# sybase/pysybase.py
# Copyright (C) 2010-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sybase+pysybase
    :name: Python-Sybase
    :dbapi: Sybase
    :connectstring: sybase+pysybase://<username>:<password>@<dsn>/\
[database name]
    :url: http://python-sybase.sourceforge.net/

Unicode Support
---------------

The python-sybase driver does not appear to support non-ASCII strings of any
kind at this time.

"""

from sqlalchemy import types as sqltypes, processors
from sqlalchemy.dialects.sybase.base import SybaseDialect, \
    SybaseExecutionContext, SybaseSQLCompiler


class _SybNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            return processors.to_float
        else:
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class SybaseExecutionContext_pysybase(SybaseExecutionContext):

    def set_ddl_autocommit(self, dbapi_connection, value):
        if value:
            # call commit() on the Sybase connection directly,
            # to avoid any side effects of calling a Connection
            # transactional method inside of pre_exec()
            dbapi_connection.commit()

    def pre_exec(self):
        SybaseExecutionContext.pre_exec(self)

        for param in self.parameters:
            for key in list(param):
                param["@" + key] = param[key]
                del param[key]


class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
    def bindparam_string(self, name, **kw):
        return "@" + name


class SybaseDialect_pysybase(SybaseDialect):
    driver = 'pysybase'
    execution_ctx_cls = SybaseExecutionContext_pysybase
    statement_compiler = SybaseSQLCompiler_pysybase

    colspecs = {
        sqltypes.Numeric: _SybNumeric,
        sqltypes.Float: sqltypes.Float
    }

    @classmethod
    def dbapi(cls):
        import Sybase
        return Sybase

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user', password='passwd')

        return ([opts.pop('host')], opts)

    def do_executemany(self, cursor, statement, parameters, context=None):
        # calling python-sybase executemany yields:
        # TypeError: string too long for buffer
        for param in parameters:
            cursor.execute(statement, param)

    def _get_server_version_info(self, connection):
        vers = connection.scalar("select @@version_number")
        # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
        # (12, 5, 0, 0)
        return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, (self.dbapi.OperationalError,
                          self.dbapi.ProgrammingError)):
            msg = str(e)
            return ('Unable to complete network request to host' in msg or
                    'Invalid connection state' in msg or
                    'Invalid cursor state' in msg)
        else:
            return False

dialect = SybaseDialect_pysybase
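
The version decoding in ``_get_server_version_info()`` above relies on
Python 2 integer division.  The same arithmetic written with Python 3's ``//``
operator, for the example value 15500 mentioned in the comment::

    vers = 15500
    info = (vers // 1000, vers % 1000 // 100, vers % 100 // 10, vers % 10)
    print(info)  # (15, 5, 0, 0)
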
@@ -0,0 +1,471 @@
# engine/__init__.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""SQL connections, SQL execution and high-level DB-API interface.
|
||||
|
||||
The engine package defines the basic components used to interface
|
||||
DB-API modules with higher-level statement construction,
|
||||
connection-management, execution and result contexts. The primary
|
||||
"entry point" class into this package is the Engine and its public
|
||||
constructor ``create_engine()``.
|
||||
|
||||
This package includes:
|
||||
|
||||
base.py
|
||||
Defines interface classes and some implementation classes which
|
||||
comprise the basic components used to interface between a DB-API,
|
||||
constructed and plain-text statements, connections, transactions,
|
||||
and results.
|
||||
|
||||
default.py
|
||||
Contains default implementations of some of the components defined
|
||||
in base.py. All current database dialects use the classes in
|
||||
default.py as base classes for their own database-specific
|
||||
implementations.
|
||||
|
||||
strategies.py
|
||||
The mechanics of constructing ``Engine`` objects are represented
|
||||
here. Defines the ``EngineStrategy`` class which represents how
|
||||
to go from arguments specified to the ``create_engine()``
|
||||
function, to a fully constructed ``Engine``, including
|
||||
initialization of connection pooling, dialects, and specific
|
||||
subclasses of ``Engine``.
|
||||
|
||||
threadlocal.py
|
||||
The ``TLEngine`` class is defined here, which is a subclass of
|
||||
the generic ``Engine`` and tracks ``Connection`` and
|
||||
``Transaction`` objects against the identity of the current
|
||||
thread. This allows certain programming patterns based around
|
||||
the concept of a "thread-local connection" to be possible.
|
||||
The ``TLEngine`` is created by using the "threadlocal" engine
|
||||
strategy in conjunction with the ``create_engine()`` function.
|
||||
|
||||
url.py
|
||||
Defines the ``URL`` class which represents the individual
|
||||
components of a string URL passed to ``create_engine()``. Also
|
||||
defines a basic module-loading strategy for the dialect specifier
|
||||
within a URL.
|
||||
"""
|
||||
|
||||
from .interfaces import (
|
||||
Connectable,
|
||||
CreateEnginePlugin,
|
||||
Dialect,
|
||||
ExecutionContext,
|
||||
ExceptionContext,
|
||||
|
||||
# backwards compat
|
||||
Compiled,
|
||||
TypeCompiler
|
||||
)
|
||||
|
||||
from .base import (
|
||||
Connection,
|
||||
Engine,
|
||||
NestedTransaction,
|
||||
RootTransaction,
|
||||
Transaction,
|
||||
TwoPhaseTransaction,
|
||||
)
|
||||
|
||||
from .result import (
|
||||
BaseRowProxy,
|
||||
BufferedColumnResultProxy,
|
||||
BufferedColumnRow,
|
||||
BufferedRowResultProxy,
|
||||
FullyBufferedResultProxy,
|
||||
ResultProxy,
|
||||
RowProxy,
|
||||
)
|
||||
|
||||
from .util import (
|
||||
connection_memoize
|
||||
)
|
||||
|
||||
|
||||
from . import util, strategies
|
||||
|
||||
# backwards compat
|
||||
from ..sql import ddl
|
||||
|
||||
default_strategy = 'plain'
|
||||
|
||||
|
||||
def create_engine(*args, **kwargs):
    """Create a new :class:`.Engine` instance.

    The standard calling form is to send the URL as the
    first positional argument, usually a string
    that indicates database dialect and connection arguments::

        engine = create_engine("postgresql://scott:tiger@localhost/test")

    Additional keyword arguments may then follow it which
    establish various options on the resulting :class:`.Engine`
    and its underlying :class:`.Dialect` and :class:`.Pool`
    constructs::

        engine = create_engine("mysql://scott:tiger@hostname/dbname",
                               encoding='latin1', echo=True)

    The string form of the URL is
    ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
    ``dialect`` is a database name such as ``mysql``, ``oracle``,
    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively,
    the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.

    ``**kwargs`` takes a wide variety of options which are routed
    towards their appropriate components.  Arguments may be specific to
    the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
    :class:`.Pool`.  Specific dialects also accept keyword arguments that
    are unique to that dialect.  Here, we describe the parameters
    that are common to most :func:`.create_engine()` usage.

    Once established, the newly resulting :class:`.Engine` will
    request a connection from the underlying :class:`.Pool` once
    :meth:`.Engine.connect` is called, or a method which depends on it
    such as :meth:`.Engine.execute` is invoked.  The :class:`.Pool` in turn
    will establish the first actual DBAPI connection when this request
    is received.  The :func:`.create_engine` call itself does **not**
    establish any actual DBAPI connections directly.

    .. seealso::

        :doc:`/core/engines`

        :doc:`/dialects/index`

        :ref:`connections_toplevel`

    :param case_sensitive=True: if False, result column names
        will match in a case-insensitive fashion, that is,
        ``row['SomeColumn']``.

        .. versionchanged:: 0.8
            By default, result row names match case-sensitively.
            In version 0.7 and prior, all matches were case-insensitive.

    :param connect_args: a dictionary of options which will be
        passed directly to the DBAPI's ``connect()`` method as
        additional keyword arguments.  See the example
        at :ref:`custom_dbapi_args`.

    :param convert_unicode=False: if set to True, sets
        the default behavior of ``convert_unicode`` on the
        :class:`.String` type to ``True``, regardless
        of a setting of ``False`` on an individual
        :class:`.String` type, thus causing all :class:`.String`
        -based columns
        to accommodate Python ``unicode`` objects.  This flag
        is useful as an engine-wide setting when using a
        DBAPI that does not natively support Python
        ``unicode`` objects and raises an error when
        one is received (such as pyodbc with FreeTDS).

        See :class:`.String` for further details on
        what this flag indicates.

    :param creator: a callable which returns a DBAPI connection.
        This creation function will be passed to the underlying
        connection pool and will be used to create all new database
        connections.  Usage of this function causes connection
        parameters specified in the URL argument to be bypassed.

    :param echo=False: if True, the Engine will log all statements
        as well as a ``repr()`` of their parameter lists to the engine's
        logger, which defaults to sys.stdout.  The ``echo`` attribute of
        ``Engine`` can be modified at any time to turn logging on and
        off.  If set to the string ``"debug"``, result rows will be
        printed to the standard output as well.  This flag ultimately
        controls a Python logger; see :ref:`dbengine_logging` for
        information on how to configure logging directly.

    :param echo_pool=False: if True, the connection pool will log
        all checkouts/checkins to the logging stream, which defaults to
        sys.stdout.  This flag ultimately controls a Python logger; see
        :ref:`dbengine_logging` for information on how to configure logging
        directly.

    :param empty_in_strategy: The SQL compilation strategy to use when
        rendering an IN or NOT IN expression for :meth:`.ColumnOperators.in_`
        where the right-hand side
        is an empty set.  This is a string value that may be one of
        ``static``, ``dynamic``, or ``dynamic_warn``.  The ``static``
        strategy is the default, and an IN comparison to an empty set
        will generate a simple false expression "1 != 1".  The ``dynamic``
        strategy behaves like that of SQLAlchemy 1.1 and earlier, emitting
        a false expression of the form "expr != expr", which has the effect
        of evaluating to NULL in the case of a null expression.
        ``dynamic_warn`` is the same as ``dynamic``, however it also emits a
        warning when an empty set is encountered; this is because the
        "dynamic" comparison typically performs poorly on most databases.

        .. versionadded:: 1.2  Added the ``empty_in_strategy`` setting and
           additionally defaulted the behavior for empty-set IN comparisons
           to a static boolean expression.

    :param encoding: Defaults to ``utf-8``.  This is the string
        encoding used by SQLAlchemy for string encode/decode
        operations which occur within SQLAlchemy, **outside of
        the DBAPI.**  Most modern DBAPIs feature some degree of
        direct support for Python ``unicode`` objects,
        what you see in Python 2 as a string of the form
        ``u'some string'``.  For those scenarios where the
        DBAPI is detected as not supporting a Python ``unicode``
        object, this encoding is used to determine the
        source/destination encoding.  It is **not used**
        for those cases where the DBAPI handles unicode
        directly.

        To properly configure a system to accommodate Python
        ``unicode`` objects, the DBAPI should be
        configured to handle unicode to the greatest
        degree as is appropriate - see
        the notes on unicode pertaining to the specific
        target database in use at :ref:`dialect_toplevel`.

        Areas where string encoding may need to be accommodated
        outside of the DBAPI include zero or more of:

        * the values passed to bound parameters, corresponding to
          the :class:`.Unicode` type or the :class:`.String` type
          when ``convert_unicode`` is ``True``;
        * the values returned in result set columns corresponding
          to the :class:`.Unicode` type or the :class:`.String`
          type when ``convert_unicode`` is ``True``;
        * the string SQL statement passed to the DBAPI's
          ``cursor.execute()`` method;
        * the string names of the keys in the bound parameter
          dictionary passed to the DBAPI's ``cursor.execute()``
          as well as ``cursor.setinputsizes()`` methods;
        * the string column names retrieved from the DBAPI's
          ``cursor.description`` attribute.

        When using Python 3, the DBAPI is required to support
        *all* of the above values as Python ``unicode`` objects,
        which in Python 3 are just known as ``str``.  In Python 2,
        the DBAPI does not specify unicode behavior at all,
        so SQLAlchemy must make decisions for each of the above
        values on a per-DBAPI basis - implementations are
        completely inconsistent in their behavior.

    :param execution_options: Dictionary of execution options which will
        be applied to all connections.  See
        :meth:`~sqlalchemy.engine.Connection.execution_options`.

    :param implicit_returning=True: When ``True``, a RETURNING-
        compatible construct, if available, will be used to
        fetch newly generated primary key values when a single row
        INSERT statement is emitted with no existing returning()
        clause.  This applies to those backends which support RETURNING
        or a compatible construct, including PostgreSQL, Firebird, Oracle,
        Microsoft SQL Server.  Set this to ``False`` to disable
        the automatic usage of RETURNING.

    :param isolation_level: this string parameter is interpreted by various
        dialects in order to affect the transaction isolation level of the
        database connection.  The parameter essentially accepts some subset of
        these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
        ``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
        Behavior here varies per backend, and
        individual dialects should be consulted directly.

        Note that the isolation level can also be set on a
        per-:class:`.Connection` basis as well, using the
        :paramref:`.Connection.execution_options.isolation_level`
        feature.

        .. seealso::

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level`
            - set per :class:`.Connection` isolation level

            :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`

            :ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`

            :ref:`MySQL Transaction Isolation <mysql_isolation_level>`

            :ref:`session_transaction_isolation` - for the ORM

    :param label_length=None: optional integer value which limits
        the size of dynamically generated column labels to that many
        characters.  If less than 6, labels are generated as
        "_(counter)".  If ``None``, the value of
        ``dialect.max_identifier_length`` is used instead.

    :param listeners: A list of one or more
        :class:`~sqlalchemy.interfaces.PoolListener` objects which will
        receive connection pool events.

    :param logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.engine" logger.  Defaults to a hexstring of the
        object's id.

    :param max_overflow=10: the number of connections to allow in
        connection pool "overflow", that is connections that can be
        opened above and beyond the pool_size setting, which defaults
        to five.  This is only used with :class:`~sqlalchemy.pool.QueuePool`.

    :param module=None: reference to a Python module object (the module
        itself, not its string name).  Specifies an alternate DBAPI module to
        be used by the engine's dialect.  Each sub-dialect references a
        specific DBAPI which will be imported before first connect.  This
        parameter causes the import to be bypassed, and the given module to
        be used instead.  Can be used for testing of DBAPIs as well as to
        inject "mock" DBAPI implementations into the :class:`.Engine`.

    :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
        to use when rendering bound parameters.  This style defaults to the
        one recommended by the DBAPI itself, which is retrieved from the
        ``.paramstyle`` attribute of the DBAPI.  However, most DBAPIs accept
        more than one paramstyle, and in particular it may be desirable
        to change a "named" paramstyle into a "positional" one, or vice versa.
        When this attribute is passed, it should be one of the values
        ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
        ``"pyformat"``, and should correspond to a parameter style known
        to be supported by the DBAPI in use.

    :param pool=None: an already-constructed instance of
        :class:`~sqlalchemy.pool.Pool`, such as a
        :class:`~sqlalchemy.pool.QueuePool` instance.  If non-None, this
        pool will be used directly as the underlying connection pool
        for the engine, bypassing whatever connection parameters are
        present in the URL argument.  For information on constructing
        connection pools manually, see :ref:`pooling_toplevel`.

    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
        subclass, which will be used to create a connection pool
        instance using the connection parameters given in the URL.  Note
        this differs from ``pool`` in that you don't actually
        instantiate the pool in this case, you just indicate what type
        of pool to be used.

    :param pool_logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.pool" logger.  Defaults to a hexstring of the object's
        id.

    :param pool_pre_ping: boolean, if True will enable the connection pool
        "pre-ping" feature that tests connections for liveness upon
        each checkout.

        .. versionadded:: 1.2

        .. seealso::

            :ref:`pool_disconnects_pessimistic`

    :param pool_size=5: the number of connections to keep open
        inside the connection pool.  This is used with
        :class:`~sqlalchemy.pool.QueuePool` as
        well as :class:`~sqlalchemy.pool.SingletonThreadPool`.  With
        :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
        of 0 indicates no limit; to disable pooling, set ``poolclass`` to
        :class:`~sqlalchemy.pool.NullPool` instead.

    :param pool_recycle=-1: this setting causes the pool to recycle
        connections after the given number of seconds has passed.  It
        defaults to -1, or no timeout.  For example, setting to 3600
        means connections will be recycled after one hour.  Note that
        MySQL in particular will disconnect automatically if no
        activity is detected on a connection for eight hours (although
        this is configurable with the MySQLdb connection itself and the
        server configuration as well).

        .. seealso::

            :ref:`pool_setting_recycle`

    :param pool_reset_on_return='rollback': set the "reset on return"
        behavior of the pool, which is whether ``rollback()``,
        ``commit()``, or nothing is called upon connections
        being returned to the pool.  See the docstring for
        ``reset_on_return`` at :class:`.Pool`.

        .. versionadded:: 0.7.6

    :param pool_timeout=30: number of seconds to wait before giving
        up on getting a connection from the pool.  This is only used
        with :class:`~sqlalchemy.pool.QueuePool`.

    :param plugins: string list of plugin names to load.  See
        :class:`.CreateEnginePlugin` for background.

        .. versionadded:: 1.2.3

    :param strategy='plain': selects alternate engine implementations.
        Currently available are:

        * the ``threadlocal`` strategy, which is described in
          :ref:`threadlocal_strategy`;
        * the ``mock`` strategy, which dispatches all statement
          execution to a function passed as the argument ``executor``.
          See `example in the FAQ
          <http://docs.sqlalchemy.org/en/latest/faq/metadata_schema.html#how-can-i-get-the-create-table-drop-table-output-as-a-string>`_.

    :param executor=None: a function taking arguments
        ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
        dispatch all statement execution.  Used only by ``strategy='mock'``.

    """

    strategy = kwargs.pop('strategy', default_strategy)
    strategy = strategies.strategies[strategy]
    return strategy.create(*args, **kwargs)


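# Editor's note: a minimal usage sketch of create_engine() with a few of the
# pool- and logging-related keywords documented above.  The URL and the
# numeric values are illustrative assumptions, not part of the library source.
def _example_create_engine():
    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        echo=True,           # log all emitted SQL
        pool_size=10,        # connections held open by QueuePool
        max_overflow=20,     # extra connections allowed beyond pool_size
        pool_recycle=3600,   # recycle connections after one hour
        pool_pre_ping=True,  # test connections for liveness on checkout
    )
    # no DBAPI connection exists yet; the first one is opened on connect()
    with engine.connect() as conn:
        return conn.execute("SELECT 1").scalar()

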
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file.

    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc.  The 'prefix' argument
    indicates the prefix to be searched for.  Each matching key (after the
    prefix is stripped) is treated as though it were the corresponding keyword
    argument to a :func:`.create_engine` call.

    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
    which provides the :ref:`database URL <database_urls>`.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values.  The set of arguments
    is extensible per-dialect using the ``engine_config_types`` accessor.

    :param configuration: A dictionary (typically produced from a config file,
        but this is not a requirement).  Items whose keys start with the value
        of 'prefix' will have that prefix stripped, and will then be passed to
        :func:`.create_engine`.

    :param prefix: Prefix to match and then strip from keys
        in 'configuration'.

    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
        overrides the corresponding item taken from the 'configuration'
        dictionary.  Keyword arguments should *not* be prefixed.

    """

    options = dict((key[len(prefix):], configuration[key])
                   for key in configuration
                   if key.startswith(prefix))
    options['_coerce_config'] = True
    options.update(kwargs)
    url = options.pop('url')
    return create_engine(url, **options)


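# Editor's note: a small sketch of the prefix-stripping behavior described
# above.  The dictionary contents are illustrative assumptions; string values
# such as 'true' and '3600' are coerced per the dialect's
# ``engine_config_types`` mapping.
def _example_engine_from_config():
    configuration = {
        'sqlalchemy.url': 'sqlite:///app.db',
        'sqlalchemy.echo': 'true',
        'sqlalchemy.pool_recycle': '3600',
        'app.debug': 'true',   # ignored: does not match the prefix
    }
    return engine_from_config(configuration, prefix='sqlalchemy.')

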
__all__ = (
    'create_engine',
    'engine_from_config',
)
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,877 @@
# engine/reflection.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides an abstraction for obtaining database schema information.

Usage Notes:

Here are some general conventions when accessing the low level inspector
methods such as get_table_names, get_columns, etc.

1. Inspector methods return lists of dicts in most cases for the following
   reasons:

   * They're both standard types that can be serialized.
   * Using a dict instead of a tuple allows easy expansion of attributes.
   * Using a list for the outer structure maintains order and is easy to work
     with (e.g. list comprehension [d['name'] for d in cols]).

2. Records that contain a name, such as the column name in a column record
   use the key 'name'.  So for most return values, each record will have a
   'name' attribute.
"""

from .. import exc, sql
from ..sql import schema as sa_schema
from .. import util
from ..sql.type_api import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
from .base import Connectable


@util.decorator
def cache(fn, self, con, *args, **kw):
    info_cache = kw.get('info_cache', None)
    if info_cache is None:
        return fn(self, con, *args, **kw)
    key = (
        fn.__name__,
        tuple(a for a in args if isinstance(a, util.string_types)),
        tuple((k, v) for k, v in kw.items() if
              isinstance(v,
                         util.string_types + util.int_types + (float, )
                         )
              )
    )
    ret = info_cache.get(key)
    if ret is None:
        ret = fn(self, con, *args, **kw)
        info_cache[key] = ret
    return ret


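# Editor's note: an illustrative sketch of the memoization contract above.
# Inspector (defined below) passes its own ``info_cache`` dict into each
# dialect call, so repeated lookups with the same string/int arguments are
# answered from the cache instead of re-querying the database.
#
#     insp = Inspector.from_engine(engine)
#     insp.get_table_names()    # queries the database
#     insp.get_table_names()    # served from insp.info_cache

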
class Inspector(object):
    """Performs database schema inspection.

    The Inspector acts as a proxy to the reflection methods of the
    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
    consistent interface as well as caching support for previously
    fetched metadata.

    A :class:`.Inspector` object is usually created via the
    :func:`.inspect` function::

        from sqlalchemy import inspect, create_engine
        engine = create_engine('...')
        insp = inspect(engine)

    The inspection method above is equivalent to using the
    :meth:`.Inspector.from_engine` method, i.e.::

        engine = create_engine('...')
        insp = Inspector.from_engine(engine)

    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` may opt
    to return an :class:`.Inspector` subclass that provides additional
    methods specific to the dialect's target database.

    """

    def __init__(self, bind):
        """Initialize a new :class:`.Inspector`.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        For a dialect-specific instance of :class:`.Inspector`, see
        :meth:`.Inspector.from_engine`

        """
        # this might not be a connection, it could be an engine.
        self.bind = bind

        # set the engine
        if hasattr(bind, 'engine'):
            self.engine = bind.engine
        else:
            self.engine = bind

        if self.engine is bind:
            # if engine, ensure initialized
            bind.connect().close()

        self.dialect = self.engine.dialect
        self.info_cache = {}

    @classmethod
    def from_engine(cls, bind):
        """Construct a new dialect-specific Inspector object from the given
        engine or connection.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        This method differs from a direct constructor call of
        :class:`.Inspector` in that the
        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
        provide a dialect-specific :class:`.Inspector` instance, which may
        provide additional methods.

        See the example at :class:`.Inspector`.

        """
        if hasattr(bind.dialect, 'inspector'):
            return bind.dialect.inspector(bind)
        return Inspector(bind)

    @inspection._inspects(Connectable)
    def _insp(bind):
        return Inspector.from_engine(bind)

    @property
    def default_schema_name(self):
        """Return the default schema name presented by the dialect
        for the current engine's database user.

        E.g. this is typically ``public`` for PostgreSQL and ``dbo``
        for SQL Server.

        """
        return self.dialect.default_schema_name

    def get_schema_names(self):
        """Return all schema names.
        """

        if hasattr(self.dialect, 'get_schema_names'):
            return self.dialect.get_schema_names(self.bind,
                                                 info_cache=self.info_cache)
        return []

    def get_table_names(self, schema=None, order_by=None):
        """Return all table names within a particular schema.

        The names are expected to be real tables only, not views.
        Views are instead returned using the :meth:`.Inspector.get_view_names`
        method.

        :param schema: Schema name.  If ``schema`` is left at ``None``, the
          database's default schema is
          used, else the named schema is searched.  If the database does not
          support named schemas, behavior is undefined if ``schema`` is not
          passed as ``None``.  For special quoting, use :class:`.quoted_name`.

        :param order_by: Optional, may be the string "foreign_key" to sort
          the result on foreign key dependencies.  Does not automatically
          resolve cycles, and will raise :class:`.CircularDependencyError`
          if cycles exist.

          .. deprecated:: 1.0.0 - see
             :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
             of this which resolves foreign key cycles between tables
             automatically.

          .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
             in order of dependee to dependent; that is, in creation
             order, rather than in drop order.  This is to maintain
             consistency with similar features such as
             :attr:`.MetaData.sorted_tables` and :func:`.util.sort_tables`.

        .. seealso::

            :meth:`.Inspector.get_sorted_table_and_fkc_names`

            :attr:`.MetaData.sorted_tables`

        """

        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)
        if order_by == 'foreign_key':
            tuples = []
            for tname in tnames:
                for fkey in self.get_foreign_keys(tname, schema):
                    if tname != fkey['referred_table']:
                        tuples.append((fkey['referred_table'], tname))
            tnames = list(topological.sort(tuples, tnames))
        return tnames

    def get_sorted_table_and_fkc_names(self, schema=None):
        """Return dependency-sorted table and foreign key constraint names
        referred to within a particular schema.

        This will yield 2-tuples of
        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
        consisting of table names in CREATE order grouped with the foreign key
        constraint names that are not detected as belonging to a cycle.
        The final element
        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
        which will consist of remaining
        foreign key constraint names that would require a separate CREATE
        step after-the-fact, based on dependencies between tables.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.Inspector.get_table_names`

            :func:`.sort_tables_and_constraints` - similar method which works
            with an already-given :class:`.MetaData`.

        """
        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)

        tuples = set()
        remaining_fkcs = set()

        fknames_for_table = {}
        for tname in tnames:
            fkeys = self.get_foreign_keys(tname, schema)
            fknames_for_table[tname] = set(
                [fk['name'] for fk in fkeys]
            )
            for fkey in fkeys:
                if tname != fkey['referred_table']:
                    tuples.add((fkey['referred_table'], tname))
        try:
            candidate_sort = list(topological.sort(tuples, tnames))
        except exc.CircularDependencyError as err:
            for edge in err.edges:
                tuples.remove(edge)
                remaining_fkcs.update(
                    (edge[1], fkc)
                    for fkc in fknames_for_table[edge[1]]
                )

            candidate_sort = list(topological.sort(tuples, tnames))
        return [
            (tname, fknames_for_table[tname].difference(remaining_fkcs))
            for tname in candidate_sort
        ] + [(None, list(remaining_fkcs))]

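    # Editor's note: an illustrative sketch of consuming the return value
    # documented above, e.g. to create tables in dependency order; the
    # ``engine`` object is an assumption.
    #
    #     insp = Inspector.from_engine(engine)
    #     for tname, fkcs in insp.get_sorted_table_and_fkc_names():
    #         if tname is not None:
    #             print("create table %s" % tname)
    #         else:
    #             print("then add cyclic FK constraints: %s" % (fkcs,))
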
    def get_temp_table_names(self):
        """return a list of temporary table names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_table_names(
            self.bind, info_cache=self.info_cache)

    def get_temp_view_names(self):
        """return a list of temporary view names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_view_names(
            self.bind, info_cache=self.info_cache)

    def get_table_options(self, table_name, schema=None, **kw):
        """Return a dictionary of options specified when the table of the
        given name was created.

        This currently includes some options that apply to MySQL tables.

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        """
        if hasattr(self.dialect, 'get_table_options'):
            return self.dialect.get_table_options(
                self.bind, table_name, schema,
                info_cache=self.info_cache, **kw)
        return {}

    def get_view_names(self, schema=None):
        """Return all view names in `schema`.

        :param schema: Optional, retrieve names from a non-default schema.
         For special quoting, use :class:`.quoted_name`.

        """

        return self.dialect.get_view_names(self.bind, schema,
                                           info_cache=self.info_cache)

    def get_view_definition(self, view_name, schema=None):
        """Return definition for `view_name`.

        :param schema: Optional, retrieve names from a non-default schema.
         For special quoting, use :class:`.quoted_name`.

        """

        return self.dialect.get_view_definition(
            self.bind, view_name, schema, info_cache=self.info_cache)

    def get_columns(self, table_name, schema=None, **kw):
        """Return information about columns in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        column information as a list of dicts with these keys:

        * ``name`` - the column's name

        * ``type`` - the type of this column; an instance of
          :class:`~sqlalchemy.types.TypeEngine`

        * ``nullable`` - boolean flag if the column is NULL or NOT NULL

        * ``default`` - the column's server default value - this is returned
          as a string SQL expression.

        * ``attrs`` - dict containing optional column attributes

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        :return: list of dictionaries, each representing the definition of
         a database column.

        """

        col_defs = self.dialect.get_columns(self.bind, table_name, schema,
                                            info_cache=self.info_cache,
                                            **kw)
        for col_def in col_defs:
            # make this easy and only return instances for coltype
            coltype = col_def['type']
            if not isinstance(coltype, TypeEngine):
                col_def['type'] = coltype()
        return col_defs

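    # Editor's note: a short sketch of iterating the column records returned
    # by get_columns(); the engine and table name are illustrative
    # assumptions.
    #
    #     insp = Inspector.from_engine(engine)
    #     for col in insp.get_columns('user'):
    #         print(col['name'], col['type'], col['nullable'])
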
    @deprecated('0.7', 'Call to deprecated method get_primary_keys.'
                ' Use get_pk_constraint instead.')
    def get_primary_keys(self, table_name, schema=None, **kw):
        """Return information about primary keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a list of column names.
        """

        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)['constrained_columns']

    def get_pk_constraint(self, table_name, schema=None, **kw):
        """Return information about primary key constraint on `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a dictionary with these keys:

        constrained_columns
          a list of column names that make up the primary key

        name
          optional name of the primary key constraint.

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        """
        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)

    def get_foreign_keys(self, table_name, schema=None, **kw):
        """Return information about foreign_keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        foreign key information as a list of dicts with these keys:

        constrained_columns
          a list of column names that make up the foreign key

        referred_schema
          the name of the referred schema

        referred_table
          the name of the referred table

        referred_columns
          a list of column names in the referred table that correspond to
          constrained_columns

        name
          optional name of the foreign key constraint.

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        """

        return self.dialect.get_foreign_keys(self.bind, table_name, schema,
                                             info_cache=self.info_cache,
                                             **kw)

    def get_indexes(self, table_name, schema=None, **kw):
        """Return information about indexes in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        index information as a list of dicts with these keys:

        name
          the index's name

        column_names
          list of column names in order

        unique
          boolean

        dialect_options
          dict of dialect-specific index options.  May not be present
          for all dialects.

          .. versionadded:: 1.0.0

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        """

        return self.dialect.get_indexes(self.bind, table_name,
                                        schema,
                                        info_cache=self.info_cache, **kw)

    def get_unique_constraints(self, table_name, schema=None, **kw):
        """Return information about unique constraints in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        unique constraint information as a list of dicts with these keys:

        name
          the unique constraint's name

        column_names
          list of column names in order

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        .. versionadded:: 0.8.4

        """

        return self.dialect.get_unique_constraints(
            self.bind, table_name, schema, info_cache=self.info_cache, **kw)

    def get_table_comment(self, table_name, schema=None, **kw):
        """Return information about the table comment for ``table_name``.

        Given a string ``table_name`` and an optional string ``schema``,
        return table comment information as a dictionary with these keys:

        text
          text of the comment.

        Raises ``NotImplementedError`` for a dialect that does not support
        comments.

        .. versionadded:: 1.2

        """

        return self.dialect.get_table_comment(
            self.bind, table_name, schema, info_cache=self.info_cache,
            **kw)

    def get_check_constraints(self, table_name, schema=None, **kw):
        """Return information about check constraints in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        check constraint information as a list of dicts with these keys:

        name
          the check constraint's name

        sqltext
          the check constraint's SQL expression

        :param table_name: string name of the table.  For special quoting,
         use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
         of the database connection.  For special quoting,
         use :class:`.quoted_name`.

        .. versionadded:: 1.1.0

        """

        return self.dialect.get_check_constraints(
            self.bind, table_name, schema, info_cache=self.info_cache, **kw)

    def reflecttable(self, table, include_columns, exclude_columns=(),
                     _extend_on=None):
        """Given a Table object, load its internal constructs based on
        introspection.

        This is the underlying method used by most dialects to produce
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine.reflection import Inspector

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.

        """

        if _extend_on is not None:
            if table in _extend_on:
                return
            else:
                _extend_on.add(table)

        dialect = self.bind.dialect

        schema = self.bind.schema_for_object(table)

        table_name = table.name

        # get table-level arguments that are specifically
        # intended for reflection, e.g. oracle_resolve_synonyms.
        # these are unconditionally passed to related Table
        # objects
        reflection_options = dict(
            (k, table.dialect_kwargs.get(k))
            for k in dialect.reflection_options
            if k in table.dialect_kwargs
        )

        # reflect table options, like mysql_engine
        tbl_opts = self.get_table_options(
            table_name, schema, **table.dialect_kwargs)
        if tbl_opts:
            # add additional kwargs to the Table if the dialect
            # returned them
            table._validate_dialect_kwargs(tbl_opts)

        if util.py2k:
            if isinstance(schema, str):
                schema = schema.decode(dialect.encoding)
            if isinstance(table_name, str):
                table_name = table_name.decode(dialect.encoding)

        found_table = False
        cols_by_orig_name = {}

        for col_d in self.get_columns(
                table_name, schema, **table.dialect_kwargs):
            found_table = True

            self._reflect_column(
                table, col_d, include_columns,
                exclude_columns, cols_by_orig_name)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        self._reflect_pk(
            table_name, schema, table, cols_by_orig_name, exclude_columns)

        self._reflect_fk(
            table_name, schema, table, cols_by_orig_name,
            exclude_columns, _extend_on, reflection_options)

        self._reflect_indexes(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_unique_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_check_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_table_comment(
            table_name, schema, table, reflection_options
        )

    def _reflect_column(
            self, table, col_d, include_columns,
            exclude_columns, cols_by_orig_name):

        orig_name = col_d['name']

        table.dispatch.column_reflect(self, table, col_d)

        # fetch name again as column_reflect is allowed to
        # change it
        name = col_d['name']
        if (include_columns and name not in include_columns) \
                or (exclude_columns and name in exclude_columns):
            return

        coltype = col_d['type']

        col_kw = dict(
            (k, col_d[k])
            for k in ['nullable', 'autoincrement', 'quote', 'info', 'key',
                      'comment']
            if k in col_d
        )

        colargs = []
        if col_d.get('default') is not None:
            default = col_d['default']
            if isinstance(default, sql.elements.TextClause):
                default = sa_schema.DefaultClause(default, _reflected=True)
            elif not isinstance(default, sa_schema.FetchedValue):
                default = sa_schema.DefaultClause(
                    sql.text(col_d['default']), _reflected=True)

            colargs.append(default)

        if 'sequence' in col_d:
            self._reflect_col_sequence(col_d, colargs)

        cols_by_orig_name[orig_name] = col = \
            sa_schema.Column(name, coltype, *colargs, **col_kw)

        if col.key in table.primary_key:
            col.primary_key = True
        table.append_column(col)

    def _reflect_col_sequence(self, col_d, colargs):
        if 'sequence' in col_d:
            # TODO: mssql and sybase are using this.
            seq = col_d['sequence']
            sequence = sa_schema.Sequence(seq['name'], 1, 1)
            if 'start' in seq:
                sequence.start = seq['start']
            if 'increment' in seq:
                sequence.increment = seq['increment']
            colargs.append(sequence)

    def _reflect_pk(
            self, table_name, schema, table,
            cols_by_orig_name, exclude_columns):
        pk_cons = self.get_pk_constraint(
            table_name, schema, **table.dialect_kwargs)
        if pk_cons:
            pk_cols = [
                cols_by_orig_name[pk]
                for pk in pk_cons['constrained_columns']
                if pk in cols_by_orig_name and pk not in exclude_columns
            ]

            # update pk constraint name
            table.primary_key.name = pk_cons.get('name')

            # tell the PKConstraint to re-initialize
            # its column collection
            table.primary_key._reload(pk_cols)

    def _reflect_fk(
            self, table_name, schema, table, cols_by_orig_name,
            exclude_columns, _extend_on, reflection_options):
        fkeys = self.get_foreign_keys(
            table_name, schema, **table.dialect_kwargs)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_columns = [
                cols_by_orig_name[c].key
                if c in cols_by_orig_name else c
                for c in fkey_d['constrained_columns']
            ]
            if exclude_columns and set(constrained_columns).intersection(
                    exclude_columns):
                continue
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.bind,
                                _extend_on=_extend_on,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.bind,
                                schema=sa_schema.BLANK_SCHEMA,
                                _extend_on=_extend_on,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            if 'options' in fkey_d:
                options = fkey_d['options']
            else:
                options = {}
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True,
                                               **options))

    def _reflect_indexes(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'index')
            dialect_options = index_d.get('dialect_options', {})

            duplicates = index_d.get('duplicates_constraint')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s key for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            idx_cols = []
            for c in columns:
                try:
                    idx_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "%s key '%s' was not located in "
                        "columns for table '%s'" % (
                            flavor, c, table_name
                        ))
                else:
                    idx_cols.append(idx_col)

            sa_schema.Index(
                name, *idx_cols,
                _table=table,
                **dict(list(dialect_options.items()) + [('unique', unique)])
            )

    def _reflect_unique_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):

        # Unique Constraints
        try:
            constraints = self.get_unique_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            conname = const_d['name']
            columns = const_d['column_names']
            duplicates = const_d.get('duplicates_index')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting unique constraint key for (%s), "
                    "key covers omitted columns." %
                    ', '.join(columns))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_cols = []
            for c in columns:
                try:
                    constrained_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "unique constraint key '%s' was not located in "
                        "columns for table '%s'" % (c, table_name))
                else:
                    constrained_cols.append(constrained_col)
            table.append_constraint(
                sa_schema.UniqueConstraint(*constrained_cols, name=conname))

    def _reflect_check_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        try:
            constraints = self.get_check_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            table.append_constraint(
                sa_schema.CheckConstraint(**const_d))

    def _reflect_table_comment(
            self, table_name, schema, table, reflection_options):
        try:
            comment_dict = self.get_table_comment(table_name, schema)
        except NotImplementedError:
            return
        else:
            table.comment = comment_dict.get('text', None)
File diff suppressed because it is too large
@ -0,0 +1,285 @@
# engine/strategies.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Strategies for creating new instances of Engine types.

These are semi-private implementation classes which provide the
underlying behavior for the "strategy" keyword argument available on
:func:`~sqlalchemy.engine.create_engine`.  Current available options are
``plain``, ``threadlocal``, and ``mock``.

New strategies can be added via new ``EngineStrategy`` classes.
"""

from operator import attrgetter

from sqlalchemy.engine import base, threadlocal, url
from sqlalchemy import util, event
from sqlalchemy import pool as poollib
from sqlalchemy.sql import schema

strategies = {}


class EngineStrategy(object):
    """An adaptor that processes input arguments and produces an Engine.

    Provides a ``create`` method that receives input arguments and
    produces an instance of base.Engine or a subclass.

    """

    def __init__(self):
        strategies[self.name] = self

    def create(self, *args, **kwargs):
        """Given arguments, returns a new Engine instance."""

        raise NotImplementedError()


class DefaultEngineStrategy(EngineStrategy):
    """Base class for built-in strategies."""

    def create(self, name_or_url, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        plugins = u._instantiate_plugins(kwargs)

        u.query.pop('plugin', None)
        kwargs.pop('plugins', None)

        entrypoint = u._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(u)

        if kwargs.pop('_coerce_config', False):
            def pop_kwarg(key, default=None):
                value = kwargs.pop(key, default)
                if key in dialect_cls.engine_config_types:
                    value = dialect_cls.engine_config_types[key](value)
                return value
        else:
            pop_kwarg = kwargs.pop

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = pop_kwarg(k)

        dbapi = kwargs.pop('module', None)
        if dbapi is None:
            dbapi_args = {}
            for k in util.get_func_kwargs(dialect_cls.dbapi):
                if k in kwargs:
                    dbapi_args[k] = pop_kwarg(k)
            dbapi = dialect_cls.dbapi(**dbapi_args)

        dialect_args['dbapi'] = dbapi

        for plugin in plugins:
            plugin.handle_dialect_kwargs(dialect_cls, dialect_args)

        # create dialect
        dialect = dialect_cls(**dialect_args)

        # assemble connection arguments
        (cargs, cparams) = dialect.create_connect_args(u)
        cparams.update(pop_kwarg('connect_args', {}))
        cargs = list(cargs)  # allow mutability

        # look for existing pool or create
        pool = pop_kwarg('pool', None)
        if pool is None:
            def connect(connection_record=None):
                if dialect._has_events:
                    for fn in dialect.dispatch.do_connect:
                        connection = fn(
                            dialect, connection_record, cargs, cparams)
                        if connection is not None:
                            return connection
                return dialect.connect(*cargs, **cparams)

            creator = pop_kwarg('creator', connect)

            poolclass = pop_kwarg('poolclass', None)
            if poolclass is None:
                poolclass = dialect_cls.get_pool_class(u)
            pool_args = {
                'dialect': dialect
            }

            # consume pool arguments from kwargs, translating a few of
            # the arguments
            translate = {'logging_name': 'pool_logging_name',
                         'echo': 'echo_pool',
                         'timeout': 'pool_timeout',
                         'recycle': 'pool_recycle',
                         'events': 'pool_events',
                         'use_threadlocal': 'pool_threadlocal',
                         'reset_on_return': 'pool_reset_on_return',
                         'pre_ping': 'pool_pre_ping'}
            for k in util.get_cls_kwargs(poolclass):
                tk = translate.get(k, k)
                if tk in kwargs:
                    pool_args[k] = pop_kwarg(tk)

            for plugin in plugins:
                plugin.handle_pool_kwargs(poolclass, pool_args)

            pool = poolclass(creator, **pool_args)
        else:
            if isinstance(pool, poollib._DBProxy):
                pool = pool.get_pool(*cargs, **cparams)
            else:
                pool = pool

        pool._dialect = dialect

        # create engine.
        engineclass = self.engine_cls
        engine_args = {}
        for k in util.get_cls_kwargs(engineclass):
            if k in kwargs:
                engine_args[k] = pop_kwarg(k)

        _initialize = kwargs.pop('_initialize', True)

        # all kwargs should be consumed
        if kwargs:
            raise TypeError(
                "Invalid argument(s) %s sent to create_engine(), "
                "using configuration %s/%s/%s.  Please check that the "
                "keyword arguments are appropriate for this combination "
                "of components." % (','.join("'%s'" % k for k in kwargs),
                                    dialect.__class__.__name__,
                                    pool.__class__.__name__,
                                    engineclass.__name__))

        engine = engineclass(pool, dialect, u, **engine_args)

        if _initialize:
            do_on_connect = dialect.on_connect()
            if do_on_connect:
                def on_connect(dbapi_connection, connection_record):
                    conn = getattr(
                        dbapi_connection, '_sqla_unwrap', dbapi_connection)
                    if conn is None:
                        return
                    do_on_connect(conn)

                event.listen(pool, 'first_connect', on_connect)
                event.listen(pool, 'connect', on_connect)

            def first_connect(dbapi_connection, connection_record):
                c = base.Connection(engine, connection=dbapi_connection,
                                    _has_events=False)
                c._execution_options = util.immutabledict()
                dialect.initialize(c)
            event.listen(pool, 'first_connect', first_connect, once=True)

        dialect_cls.engine_created(engine)
        if entrypoint is not dialect_cls:
            entrypoint.engine_created(engine)

        for plugin in plugins:
            plugin.engine_created(engine)

        return engine


class PlainEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring a regular Engine."""

    name = 'plain'
    engine_cls = base.Engine

PlainEngineStrategy()


class ThreadLocalEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring an Engine with threadlocal behavior."""

    name = 'threadlocal'
    engine_cls = threadlocal.TLEngine

ThreadLocalEngineStrategy()


class MockEngineStrategy(EngineStrategy):
    """Strategy for configuring an Engine-like object with mocked execution.

    Produces a single mock Connectable object which dispatches
    statement execution to a passed-in function.

    """

    name = 'mock'

    def create(self, name_or_url, executor, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = kwargs.pop(k)

        # create dialect
        dialect = dialect_cls(**dialect_args)

        return MockEngineStrategy.MockConnection(dialect, executor)

    class MockConnection(base.Connectable):
        def __init__(self, dialect, execute):
            self._dialect = dialect
            self.execute = execute

        engine = property(lambda s: s)
        dialect = property(attrgetter('_dialect'))
        name = property(lambda s: s._dialect.name)

        schema_for_object = schema._schema_getter(None)

        def contextual_connect(self, **kwargs):
            return self

        def execution_options(self, **kw):
            return self

        def compiler(self, statement, parameters, **kwargs):
            return self._dialect.compiler(
                statement, parameters, engine=self, **kwargs)

        def create(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl

            ddl.SchemaGenerator(
                self.dialect, self, **kwargs).traverse_single(entity)

        def drop(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl
            ddl.SchemaDropper(
                self.dialect, self, **kwargs).traverse_single(entity)

        def _run_visitor(self, visitorcallable, element,
                         connection=None,
                         **kwargs):
            kwargs['checkfirst'] = False
            visitorcallable(self.dialect, self,
                            **kwargs).traverse_single(element)

        def execute(self, object, *multiparams, **params):
            raise NotImplementedError()

MockEngineStrategy()
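
# Editor's note: a minimal sketch of the ``mock`` strategy, following the FAQ
# recipe referenced in the create_engine() docstring: capture DDL as strings
# instead of executing it.  The table definition is an illustrative
# assumption.
def _example_mock_ddl_dump():
    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    metadata = MetaData()
    Table('user', metadata, Column('id', Integer, primary_key=True))

    def dump(sql, *multiparams, **params):
        # each DDL construct is dispatched here instead of being executed
        print(sql.compile(dialect=engine.dialect))

    engine = create_engine('postgresql://', strategy='mock', executor=dump)
    metadata.create_all(engine, checkfirst=False)
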
@ -0,0 +1,138 @@
# engine/threadlocal.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides a thread-local transactional wrapper around the root Engine class.
|
||||
|
||||
The ``threadlocal`` module is invoked when using the
|
||||
``strategy="threadlocal"`` flag with :func:`~sqlalchemy.engine.create_engine`.
|
||||
This module is semi-private and is invoked automatically when the threadlocal
|
||||
engine strategy is used.
|
||||
"""
|
||||
|
||||
from .. import util
|
||||
from . import base
|
||||
import weakref
|
||||
|
||||
|
||||
class TLConnection(base.Connection):
|
||||
|
||||
def __init__(self, *arg, **kw):
|
||||
super(TLConnection, self).__init__(*arg, **kw)
|
||||
self.__opencount = 0
|
||||
|
||||
def _increment_connect(self):
|
||||
self.__opencount += 1
|
||||
return self
|
||||
|
||||
def close(self):
|
||||
if self.__opencount == 1:
|
||||
base.Connection.close(self)
|
||||
self.__opencount -= 1
|
||||
|
||||
def _force_close(self):
|
||||
self.__opencount = 0
|
||||
base.Connection.close(self)
|
||||
|
||||
|
||||
class TLEngine(base.Engine):
|
||||
"""An Engine that includes support for thread-local managed
|
||||
transactions.
|
||||
|
||||
"""
|
||||
_tl_connection_cls = TLConnection
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(TLEngine, self).__init__(*args, **kwargs)
|
||||
self._connections = util.threading.local()
|
||||
|
||||
def contextual_connect(self, **kw):
|
||||
if not hasattr(self._connections, 'conn'):
|
||||
connection = None
|
||||
else:
|
||||
connection = self._connections.conn()
|
||||
|
||||
if connection is None or connection.closed:
|
||||
# guards against pool-level reapers, if desired.
|
||||
# or not connection.connection.is_valid:
|
||||
connection = self._tl_connection_cls(
|
||||
self,
|
||||
self._wrap_pool_connect(
|
||||
self.pool.connect, connection),
|
||||
**kw)
|
||||
self._connections.conn = weakref.ref(connection)
|
||||
|
||||
return connection._increment_connect()
|
||||
|
||||
def begin_twophase(self, xid=None):
|
||||
if not hasattr(self._connections, 'trans'):
|
||||
self._connections.trans = []
|
||||
self._connections.trans.append(
|
||||
self.contextual_connect().begin_twophase(xid=xid))
|
||||
return self
|
||||
|
||||
def begin_nested(self):
|
||||
if not hasattr(self._connections, 'trans'):
|
||||
self._connections.trans = []
|
||||
self._connections.trans.append(
|
||||
self.contextual_connect().begin_nested())
|
||||
return self
|
||||
|
||||
def begin(self):
|
||||
if not hasattr(self._connections, 'trans'):
|
||||
self._connections.trans = []
|
||||
self._connections.trans.append(self.contextual_connect().begin())
|
||||
return self
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
if type is None:
|
||||
self.commit()
|
||||
else:
|
||||
self.rollback()
|
||||
|
||||
def prepare(self):
|
||||
if not hasattr(self._connections, 'trans') or \
|
||||
not self._connections.trans:
|
||||
return
|
||||
self._connections.trans[-1].prepare()
|
||||
|
||||
def commit(self):
|
||||
if not hasattr(self._connections, 'trans') or \
|
||||
not self._connections.trans:
|
||||
return
|
||||
trans = self._connections.trans.pop(-1)
|
||||
trans.commit()
|
||||
|
||||
def rollback(self):
|
||||
if not hasattr(self._connections, 'trans') or \
|
||||
not self._connections.trans:
|
||||
return
|
||||
trans = self._connections.trans.pop(-1)
|
||||
trans.rollback()
|
||||
|
||||
def dispose(self):
|
||||
self._connections = util.threading.local()
|
||||
super(TLEngine, self).dispose()
|
||||
|
||||
@property
|
||||
def closed(self):
|
||||
return not hasattr(self._connections, 'conn') or \
|
||||
self._connections.conn() is None or \
|
||||
self._connections.conn().closed
|
||||
|
||||
def close(self):
|
||||
if not self.closed:
|
||||
self.contextual_connect().close()
|
||||
connection = self._connections.conn()
|
||||
connection._force_close()
|
||||
del self._connections.conn
|
||||
self._connections.trans = []
|
||||
|
||||
def __repr__(self):
|
||||
return 'TLEngine(%r)' % self.url
|
|
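A minimal sketch of the thread-local usage pattern enabled above, where begin()/commit()/rollback() are called on the engine itself and scope to the current thread (table name hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://', strategy='threadlocal')

    engine.begin()  # transaction bound to the calling thread
    try:
        engine.execute("create table t (x integer)")
        engine.execute("insert into t (x) values (1)")
        engine.commit()
    except Exception:
        engine.rollback()
        raise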
@@ -0,0 +1,288 @@
|
|||
# engine/url.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
|
||||
information about a database connection specification.
|
||||
|
||||
The URL object is created automatically when
|
||||
:func:`~sqlalchemy.engine.create_engine` is called with a string
|
||||
argument; alternatively, the URL is a public-facing construct which can
|
||||
be used directly and is also accepted directly by ``create_engine()``.
|
||||
"""
|
||||
|
||||
import re
|
||||
from .. import exc, util
|
||||
from . import Dialect
|
||||
from ..dialects import registry, plugins
|
||||
|
||||
|
||||
class URL(object):
|
||||
"""
|
||||
Represent the components of a URL used to connect to a database.
|
||||
|
||||
This object is suitable to be passed directly to a
|
||||
:func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed
|
||||
from a string by the :func:`.make_url` function. The string
|
||||
format of the URL is an RFC-1738-style string.
|
||||
|
||||
All initialization parameters are available as public attributes.
|
||||
|
||||
:param drivername: the name of the database backend.
|
||||
This name will correspond to a module in sqlalchemy/databases
|
||||
or a third party plug-in.
|
||||
|
||||
:param username: The user name.
|
||||
|
||||
:param password: database password.
|
||||
|
||||
:param host: The name of the host.
|
||||
|
||||
:param port: The port number.
|
||||
|
||||
:param database: The database name.
|
||||
|
||||
:param query: A dictionary of options to be passed to the
|
||||
dialect and/or the DBAPI upon connect.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, drivername, username=None, password=None,
|
||||
host=None, port=None, database=None, query=None):
|
||||
self.drivername = drivername
|
||||
self.username = username
|
||||
self.password_original = password
|
||||
self.host = host
|
||||
if port is not None:
|
||||
self.port = int(port)
|
||||
else:
|
||||
self.port = None
|
||||
self.database = database
|
||||
self.query = query or {}
|
||||
|
||||
def __to_string__(self, hide_password=True):
|
||||
s = self.drivername + "://"
|
||||
if self.username is not None:
|
||||
s += _rfc_1738_quote(self.username)
|
||||
if self.password is not None:
|
||||
s += ':' + ('***' if hide_password
|
||||
else _rfc_1738_quote(self.password))
|
||||
s += "@"
|
||||
if self.host is not None:
|
||||
if ':' in self.host:
|
||||
s += "[%s]" % self.host
|
||||
else:
|
||||
s += self.host
|
||||
if self.port is not None:
|
||||
s += ':' + str(self.port)
|
||||
if self.database is not None:
|
||||
s += '/' + self.database
|
||||
if self.query:
|
||||
keys = list(self.query)
|
||||
keys.sort()
|
||||
s += '?' + "&".join(
|
||||
"%s=%s" % (
|
||||
k,
|
||||
element
|
||||
) for k in keys for element in util.to_list(self.query[k])
|
||||
)
|
||||
return s
|
||||
|
||||
def __str__(self):
|
||||
return self.__to_string__(hide_password=False)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__to_string__()
|
||||
|
||||
def __hash__(self):
|
||||
return hash(str(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
return \
|
||||
isinstance(other, URL) and \
|
||||
self.drivername == other.drivername and \
|
||||
self.username == other.username and \
|
||||
self.password == other.password and \
|
||||
self.host == other.host and \
|
||||
self.database == other.database and \
|
||||
self.query == other.query
|
||||
|
||||
@property
|
||||
def password(self):
|
||||
if self.password_original is None:
|
||||
return None
|
||||
else:
|
||||
return util.text_type(self.password_original)
|
||||
|
||||
@password.setter
|
||||
def password(self, password):
|
||||
self.password_original = password
|
||||
|
||||
def get_backend_name(self):
|
||||
if '+' not in self.drivername:
|
||||
return self.drivername
|
||||
else:
|
||||
return self.drivername.split('+')[0]
|
||||
|
||||
def get_driver_name(self):
|
||||
if '+' not in self.drivername:
|
||||
return self.get_dialect().driver
|
||||
else:
|
||||
return self.drivername.split('+')[1]
|
||||
|
||||
def _instantiate_plugins(self, kwargs):
|
||||
plugin_names = util.to_list(self.query.get('plugin', ()))
|
||||
plugin_names += kwargs.get('plugins', [])
|
||||
|
||||
return [
|
||||
plugins.load(plugin_name)(self, kwargs)
|
||||
for plugin_name in plugin_names
|
||||
]
|
||||
|
||||
def _get_entrypoint(self):
|
||||
"""Return the "entry point" dialect class.
|
||||
|
||||
This is normally the dialect itself except in the case when the
|
||||
returned class implements the get_dialect_cls() method.
|
||||
|
||||
"""
|
||||
if '+' not in self.drivername:
|
||||
name = self.drivername
|
||||
else:
|
||||
name = self.drivername.replace('+', '.')
|
||||
cls = registry.load(name)
|
||||
# check for legacy dialects that
|
||||
# would return a module with 'dialect' as the
|
||||
# actual class
|
||||
if hasattr(cls, 'dialect') and \
|
||||
isinstance(cls.dialect, type) and \
|
||||
issubclass(cls.dialect, Dialect):
|
||||
return cls.dialect
|
||||
else:
|
||||
return cls
|
||||
|
||||
def get_dialect(self):
|
||||
"""Return the SQLAlchemy database dialect class corresponding
|
||||
to this URL's driver name.
|
||||
"""
|
||||
entrypoint = self._get_entrypoint()
|
||||
dialect_cls = entrypoint.get_dialect_cls(self)
|
||||
return dialect_cls
|
||||
|
||||
def translate_connect_args(self, names=[], **kw):
|
||||
r"""Translate url attributes into a dictionary of connection arguments.
|
||||
|
||||
Returns attributes of this url (`host`, `database`, `username`,
|
||||
`password`, `port`) as a plain dictionary. The attribute names are
|
||||
used as the keys by default. Unset or false attributes are omitted
|
||||
from the final dictionary.
|
||||
|
||||
:param \**kw: Optional, alternate key names for url attributes.
|
||||
|
||||
:param names: Deprecated. Same purpose as the keyword-based alternate
|
||||
names, but correlates the name to the original positionally.
|
||||
"""
|
||||
|
||||
translated = {}
|
||||
attribute_names = ['host', 'database', 'username', 'password', 'port']
|
||||
for sname in attribute_names:
|
||||
if names:
|
||||
name = names.pop(0)
|
||||
elif sname in kw:
|
||||
name = kw[sname]
|
||||
else:
|
||||
name = sname
|
||||
if name is not None and getattr(self, sname, False):
|
||||
translated[name] = getattr(self, sname)
|
||||
return translated
|
||||
|
||||
|
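Under the logic above, a typical URL translates as follows; the alternate key names are only an assumption of what a given DBAPI might expect (``make_url`` is defined next)::

    from sqlalchemy.engine.url import make_url

    url = make_url('postgresql://scott:tiger@localhost:5432/test')

    url.translate_connect_args()
    # {'host': 'localhost', 'database': 'test', 'username': 'scott',
    #  'password': 'tiger', 'port': 5432}

    url.translate_connect_args(username='user', database='db')
    # {'host': 'localhost', 'db': 'test', 'user': 'scott',
    #  'password': 'tiger', 'port': 5432}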
||||
def make_url(name_or_url):
|
||||
"""Given a string or unicode instance, produce a new URL instance.
|
||||
|
||||
The given string is parsed according to the RFC 1738 spec. If an
|
||||
existing URL object is passed, just returns the object.
|
||||
"""
|
||||
|
||||
if isinstance(name_or_url, util.string_types):
|
||||
return _parse_rfc1738_args(name_or_url)
|
||||
else:
|
||||
return name_or_url
|
||||
|
||||
|
||||
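A short illustration of make_url() together with the URL accessors defined above; the values follow directly from the parsing logic below::

    from sqlalchemy.engine.url import make_url

    url = make_url('mysql+pymysql://scott:tiger@localhost:3306/test?charset=utf8')

    url.drivername           # 'mysql+pymysql'
    url.get_backend_name()   # 'mysql'
    url.get_driver_name()    # 'pymysql'
    (url.host, url.port)     # ('localhost', 3306)
    url.query                # {'charset': 'utf8'}
    repr(url)                # same string, but the password shown as '***'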
def _parse_rfc1738_args(name):
|
||||
pattern = re.compile(r'''
|
||||
(?P<name>[\w\+]+)://
|
||||
(?:
|
||||
(?P<username>[^:/]*)
|
||||
(?::(?P<password>.*))?
|
||||
@)?
|
||||
(?:
|
||||
(?:
|
||||
\[(?P<ipv6host>[^/]+)\] |
|
||||
(?P<ipv4host>[^/:]+)
|
||||
)?
|
||||
(?::(?P<port>[^/]*))?
|
||||
)?
|
||||
(?:/(?P<database>.*))?
|
||||
''', re.X)
|
||||
|
||||
m = pattern.match(name)
|
||||
if m is not None:
|
||||
components = m.groupdict()
|
||||
if components['database'] is not None:
|
||||
tokens = components['database'].split('?', 2)
|
||||
components['database'] = tokens[0]
|
||||
|
||||
if len(tokens) > 1:
|
||||
query = {}
|
||||
|
||||
for key, value in util.parse_qsl(tokens[1]):
|
||||
if util.py2k:
|
||||
key = key.encode('ascii')
|
||||
if key in query:
|
||||
query[key] = util.to_list(query[key])
|
||||
query[key].append(value)
|
||||
else:
|
||||
query[key] = value
|
||||
else:
|
||||
query = None
|
||||
else:
|
||||
query = None
|
||||
components['query'] = query
|
||||
|
||||
if components['username'] is not None:
|
||||
components['username'] = _rfc_1738_unquote(components['username'])
|
||||
|
||||
if components['password'] is not None:
|
||||
components['password'] = _rfc_1738_unquote(components['password'])
|
||||
|
||||
ipv4host = components.pop('ipv4host')
|
||||
ipv6host = components.pop('ipv6host')
|
||||
components['host'] = ipv4host or ipv6host
|
||||
name = components.pop('name')
|
||||
return URL(name, **components)
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Could not parse rfc1738 URL from string '%s'" % name)
|
||||
|
||||
|
||||
def _rfc_1738_quote(text):
|
||||
return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
|
||||
|
||||
|
||||
def _rfc_1738_unquote(text):
|
||||
return util.unquote(text)
|
||||
|
||||
|
||||
def _parse_keyvalue_args(name):
|
||||
m = re.match(r'(\w+)://(.*)', name)
|
||||
if m is not None:
|
||||
(name, args) = m.group(1, 2)
|
||||
opts = dict(util.parse_qsl(args))
|
||||
return URL(name, *opts)
|
||||
else:
|
||||
return None
|
|
@@ -0,0 +1,74 @@
|
|||
# engine/util.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import util
|
||||
|
||||
|
||||
def connection_memoize(key):
|
||||
"""Decorator, memoize a function in a connection.info stash.
|
||||
|
||||
Only applicable to functions which take no arguments other than a
|
||||
connection. The memo will be stored in ``connection.info[key]``.
|
||||
"""
|
||||
|
||||
@util.decorator
|
||||
def decorated(fn, self, connection):
|
||||
connection = connection.connect()
|
||||
try:
|
||||
return connection.info[key]
|
||||
except KeyError:
|
||||
connection.info[key] = val = fn(self, connection)
|
||||
return val
|
||||
|
||||
return decorated
|
||||
|
||||
|
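A sketch of how the decorator applies; the class and cache key are hypothetical, and the wrapped method must take only ``self`` and a connection::

    from sqlalchemy.engine.util import connection_memoize

    class ExampleDialect(object):
        @connection_memoize('_example_server_version')
        def server_version(self, connection):
            # computed once, then served from connection.info on every
            # later call made with the same connection
            return connection.execute('select version()').scalar()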
||||
def py_fallback():
|
||||
def _distill_params(multiparams, params):
|
||||
"""Given arguments from the calling form *multiparams, **params,
|
||||
return a list of bind parameter structures, usually a list of
|
||||
dictionaries.
|
||||
|
||||
In the case of 'raw' execution which accepts positional parameters,
|
||||
it may be a list of tuples or lists.
|
||||
|
||||
"""
|
||||
|
||||
if not multiparams:
|
||||
if params:
|
||||
return [params]
|
||||
else:
|
||||
return []
|
||||
elif len(multiparams) == 1:
|
||||
zero = multiparams[0]
|
||||
if isinstance(zero, (list, tuple)):
|
||||
if not zero or hasattr(zero[0], '__iter__') and \
|
||||
not hasattr(zero[0], 'strip'):
|
||||
# execute(stmt, [{}, {}, {}, ...])
|
||||
# execute(stmt, [(), (), (), ...])
|
||||
return zero
|
||||
else:
|
||||
# execute(stmt, ("value", "value"))
|
||||
return [zero]
|
||||
elif hasattr(zero, 'keys'):
|
||||
# execute(stmt, {"key":"value"})
|
||||
return [zero]
|
||||
else:
|
||||
# execute(stmt, "value")
|
||||
return [[zero]]
|
||||
else:
|
||||
if hasattr(multiparams[0], '__iter__') and \
|
||||
not hasattr(multiparams[0], 'strip'):
|
||||
return multiparams
|
||||
else:
|
||||
return [multiparams]
|
||||
|
||||
return locals()
|
||||
try:
|
||||
from sqlalchemy.cutils import _distill_params
|
||||
except ImportError:
|
||||
globals().update(py_fallback())
|
|
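Concretely, the pure-Python fallback above normalizes the various execute() calling forms like so (results per the branches in _distill_params)::

    _distill_params((), {})                        # []
    _distill_params((), {'x': 1})                  # [{'x': 1}]
    _distill_params(({'x': 1},), {})               # [{'x': 1}]
    _distill_params(([{'x': 1}, {'x': 2}],), {})   # [{'x': 1}, {'x': 2}]
    _distill_params((('a', 'b'),), {})             # [('a', 'b')]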
@@ -0,0 +1,11 @@
|
|||
# event/__init__.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains
|
||||
from .base import Events, dispatcher
|
||||
from .attr import RefCollection
|
||||
from .legacy import _legacy_signature
|
|
@@ -0,0 +1,188 @@
|
|||
# event/api.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Public API functions for the event system.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util, exc
|
||||
from .base import _registrars
|
||||
from .registry import _EventKey
|
||||
|
||||
CANCEL = util.symbol('CANCEL')
|
||||
NO_RETVAL = util.symbol('NO_RETVAL')
|
||||
|
||||
|
||||
def _event_key(target, identifier, fn):
|
||||
for evt_cls in _registrars[identifier]:
|
||||
tgt = evt_cls._accept_with(target)
|
||||
if tgt is not None:
|
||||
return _EventKey(target, identifier, fn, tgt)
|
||||
else:
|
||||
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
|
||||
(identifier, target))
|
||||
|
||||
|
||||
def listen(target, identifier, fn, *args, **kw):
|
||||
"""Register a listener function for the given target.
|
||||
|
||||
e.g.::
|
||||
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.schema import UniqueConstraint
|
||||
|
||||
def unique_constraint_name(const, table):
|
||||
const.name = "uq_%s_%s" % (
|
||||
table.name,
|
||||
list(const.columns)[0].name
|
||||
)
|
||||
event.listen(
|
||||
UniqueConstraint,
|
||||
"after_parent_attach",
|
||||
unique_constraint_name)
|
||||
|
||||
|
||||
A given function can also be invoked for only the first invocation
|
||||
of the event using the ``once`` argument::
|
||||
|
||||
def on_config():
|
||||
do_config()
|
||||
|
||||
event.listen(Mapper, "before_configure", on_config, once=True)
|
||||
|
||||
.. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
|
||||
and :func:`.event.listens_for`.
|
||||
|
||||
.. note::
|
||||
|
||||
The :func:`.listen` function cannot be called at the same time
|
||||
that the target event is being run. This has implications
|
||||
for thread safety, and also means an event cannot be added
|
||||
from inside the listener function for itself. The list of
|
||||
events to be run is present inside a mutable collection
|
||||
that can't be changed during iteration.
|
||||
|
||||
Event registration and removal is not intended to be a "high
|
||||
velocity" operation; it is a configurational operation. For
|
||||
systems that need to quickly associate and deassociate with
|
||||
events at high scale, use a mutable structure that is handled
|
||||
from inside of a single listener.
|
||||
|
||||
.. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
|
||||
used as the container for the list of events, which explicitly
|
||||
disallows collection mutation while the collection is being
|
||||
iterated.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:func:`.listens_for`
|
||||
|
||||
:func:`.remove`
|
||||
|
||||
"""
|
||||
|
||||
_event_key(target, identifier, fn).listen(*args, **kw)
|
||||
|
||||
|
||||
def listens_for(target, identifier, *args, **kw):
|
||||
"""Decorate a function as a listener for the given target + identifier.
|
||||
|
||||
e.g.::
|
||||
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.schema import UniqueConstraint
|
||||
|
||||
@event.listens_for(UniqueConstraint, "after_parent_attach")
|
||||
def unique_constraint_name(const, table):
|
||||
const.name = "uq_%s_%s" % (
|
||||
table.name,
|
||||
list(const.columns)[0].name
|
||||
)
|
||||
|
||||
A given function can also be invoked for only the first invocation
|
||||
of the event using the ``once`` argument::
|
||||
|
||||
@event.listens_for(Mapper, "before_configure", once=True)
|
||||
def on_config():
|
||||
do_config()
|
||||
|
||||
|
||||
.. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
|
||||
and :func:`.event.listens_for`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:func:`.listen` - general description of event listening
|
||||
|
||||
"""
|
||||
def decorate(fn):
|
||||
listen(target, identifier, fn, *args, **kw)
|
||||
return fn
|
||||
return decorate
|
||||
|
||||
|
||||
def remove(target, identifier, fn):
|
||||
"""Remove an event listener.
|
||||
|
||||
The arguments here should match exactly those which were sent to
|
||||
:func:`.listen`; all the event registration which proceeded as a result
|
||||
of this call will be reverted by calling :func:`.remove` with the same
|
||||
arguments.
|
||||
|
||||
e.g.::
|
||||
|
||||
# if a function was registered like this...
|
||||
@event.listens_for(SomeMappedClass, "before_insert", propagate=True)
|
||||
def my_listener_function(*arg):
|
||||
pass
|
||||
|
||||
# ... it's removed like this
|
||||
event.remove(SomeMappedClass, "before_insert", my_listener_function)
|
||||
|
||||
Above, the listener function associated with ``SomeMappedClass`` was also
|
||||
propagated to subclasses of ``SomeMappedClass``; the :func:`.remove`
|
||||
function will revert all of these operations.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
.. note::
|
||||
|
||||
The :func:`.remove` function cannot be called at the same time
|
||||
that the target event is being run. This has implications
|
||||
for thread safety, and also means an event cannot be removed
|
||||
from inside the listener function for itself. The list of
|
||||
events to be run is present inside a mutable collection
|
||||
that can't be changed during iteration.
|
||||
|
||||
Event registration and removal is not intended to be a "high
|
||||
velocity" operation; it is a configurational operation. For
|
||||
systems that need to quickly associate and deassociate with
|
||||
events at high scale, use a mutable structure that is handled
|
||||
from inside of a single listener.
|
||||
|
||||
.. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
|
||||
used as the container for the list of events, which explicitly
|
||||
disallows collection mutation while the collection is being
|
||||
iterated.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:func:`.listen`
|
||||
|
||||
"""
|
||||
_event_key(target, identifier, fn).remove()
|
||||
|
||||
|
||||
def contains(target, identifier, fn):
|
||||
"""Return True if the given target/ident/fn is set up to listen.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
"""
|
||||
|
||||
return _event_key(target, identifier, fn).contains()
|
|
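Taken together, the public functions above round-trip a registration; a minimal sketch against the pool-level 'connect' event::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    def on_connect(dbapi_conn, connection_record):
        print('new DBAPI connection')

    event.listen(engine, 'connect', on_connect)
    event.contains(engine, 'connect', on_connect)   # True
    event.remove(engine, 'connect', on_connect)
    event.contains(engine, 'connect', on_connect)   # False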
@@ -0,0 +1,401 @@
|
|||
# event/attr.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Attribute implementation for _Dispatch classes.
|
||||
|
||||
The various listener targets for a particular event class are represented
|
||||
as attributes, which refer to collections of listeners to be fired off.
|
||||
These collections can exist at the class level as well as at the instance
|
||||
level. An event is fired off using code like this::
|
||||
|
||||
some_object.dispatch.first_connect(arg1, arg2)
|
||||
|
||||
Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
|
||||
``first_connect`` is typically an instance of ``_ListenerCollection``
|
||||
if event listeners are present, or ``_EmptyListener`` if none are present.
|
||||
|
||||
The attribute mechanics here spend effort trying to ensure listener functions
|
||||
are available with a minimum of function call overhead, that unnecessary
|
||||
objects aren't created (i.e. many empty per-instance listener collections),
|
||||
as well as that everything is garbage collectable when owning references are
|
||||
lost. Other features such as "propagation" of listener functions across
|
||||
many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
|
||||
as well as support for subclass propagation (e.g. events assigned to
|
||||
``Pool`` vs. ``QueuePool``) are all implemented here.
|
||||
|
||||
"""
|
||||
|
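The subclass propagation mentioned above is visible from the public API: a listener attached to a base class is delivered to its subclasses as well, e.g.::

    from sqlalchemy import event
    from sqlalchemy.pool import Pool

    def on_checkout(dbapi_conn, connection_record, connection_proxy):
        print('connection checked out')

    # registered against Pool, so QueuePool and the other subclasses
    # (existing and future) receive the listener too
    event.listen(Pool, 'checkout', on_checkout)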
||||
from __future__ import absolute_import, with_statement
|
||||
from .. import exc
|
||||
from .. import util
|
||||
from ..util import threading
|
||||
from . import registry
|
||||
from . import legacy
|
||||
from itertools import chain
|
||||
import weakref
|
||||
import collections
|
||||
|
||||
|
||||
class RefCollection(util.MemoizedSlots):
|
||||
__slots__ = 'ref',
|
||||
|
||||
def _memoized_attr_ref(self):
|
||||
return weakref.ref(self, registry._collection_gced)
|
||||
|
||||
|
||||
class _empty_collection(object):
|
||||
def append(self, element):
|
||||
pass
|
||||
|
||||
def extend(self, other):
|
||||
pass
|
||||
|
||||
def remove(self, element):
|
||||
pass
|
||||
|
||||
def __iter__(self):
|
||||
return iter([])
|
||||
|
||||
def clear(self):
|
||||
pass
|
||||
|
||||
|
||||
class _ClsLevelDispatch(RefCollection):
|
||||
"""Class-level events on :class:`._Dispatch` classes."""
|
||||
|
||||
__slots__ = ('name', 'arg_names', 'has_kw',
|
||||
'legacy_signatures', '_clslevel', '__weakref__')
|
||||
|
||||
def __init__(self, parent_dispatch_cls, fn):
|
||||
self.name = fn.__name__
|
||||
argspec = util.inspect_getargspec(fn)
|
||||
self.arg_names = argspec.args[1:]
|
||||
self.has_kw = bool(argspec.keywords)
|
||||
self.legacy_signatures = list(reversed(
|
||||
sorted(
|
||||
getattr(fn, '_legacy_signatures', []),
|
||||
key=lambda s: s[0]
|
||||
)
|
||||
))
|
||||
fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn)
|
||||
|
||||
self._clslevel = weakref.WeakKeyDictionary()
|
||||
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
if named:
|
||||
fn = self._wrap_fn_for_kw(fn)
|
||||
if self.legacy_signatures:
|
||||
try:
|
||||
argspec = util.get_callable_argspec(fn, no_self=True)
|
||||
except TypeError:
|
||||
pass
|
||||
else:
|
||||
fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
|
||||
return fn
|
||||
|
||||
def _wrap_fn_for_kw(self, fn):
|
||||
def wrap_kw(*args, **kw):
|
||||
argdict = dict(zip(self.arg_names, args))
|
||||
argdict.update(kw)
|
||||
return fn(**argdict)
|
||||
return wrap_kw
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
target = event_key.dispatch_target
|
||||
assert isinstance(target, type), \
|
||||
"Class-level Event targets must be classes."
|
||||
if not getattr(target, '_sa_propagate_class_events', True):
|
||||
raise exc.InvalidRequestError(
|
||||
"Can't assign an event directly to the %s class" % target)
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls is not target and cls not in self._clslevel:
|
||||
self.update_subclass(cls)
|
||||
else:
|
||||
if cls not in self._clslevel:
|
||||
self._assign_cls_collection(cls)
|
||||
self._clslevel[cls].appendleft(event_key._listen_fn)
|
||||
registry._stored_in_collection(event_key, self)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
target = event_key.dispatch_target
|
||||
assert isinstance(target, type), \
|
||||
"Class-level Event targets must be classes."
|
||||
if not getattr(target, '_sa_propagate_class_events', True):
|
||||
raise exc.InvalidRequestError(
|
||||
"Can't assign an event directly to the %s class" % target)
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls is not target and cls not in self._clslevel:
|
||||
self.update_subclass(cls)
|
||||
else:
|
||||
if cls not in self._clslevel:
|
||||
self._assign_cls_collection(cls)
|
||||
self._clslevel[cls].append(event_key._listen_fn)
|
||||
registry._stored_in_collection(event_key, self)
|
||||
|
||||
def _assign_cls_collection(self, target):
|
||||
if getattr(target, '_sa_propagate_class_events', True):
|
||||
self._clslevel[target] = collections.deque()
|
||||
else:
|
||||
self._clslevel[target] = _empty_collection()
|
||||
|
||||
def update_subclass(self, target):
|
||||
if target not in self._clslevel:
|
||||
self._assign_cls_collection(target)
|
||||
clslevel = self._clslevel[target]
|
||||
for cls in target.__mro__[1:]:
|
||||
if cls in self._clslevel:
|
||||
clslevel.extend([
|
||||
fn for fn
|
||||
in self._clslevel[cls]
|
||||
if fn not in clslevel
|
||||
])
|
||||
|
||||
def remove(self, event_key):
|
||||
target = event_key.dispatch_target
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls in self._clslevel:
|
||||
self._clslevel[cls].remove(event_key._listen_fn)
|
||||
registry._removed_from_collection(event_key, self)
|
||||
|
||||
def clear(self):
|
||||
"""Clear all class level listeners"""
|
||||
|
||||
to_clear = set()
|
||||
for dispatcher in self._clslevel.values():
|
||||
to_clear.update(dispatcher)
|
||||
dispatcher.clear()
|
||||
registry._clear(self, to_clear)
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _ClsLevelDispatch at the class level of
|
||||
a dispatcher, this returns self.
|
||||
|
||||
"""
|
||||
return self
|
||||
|
||||
|
||||
class _InstanceLevelDispatch(RefCollection):
|
||||
__slots__ = ()
|
||||
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
return self.parent._adjust_fn_spec(fn, named)
|
||||
|
||||
|
||||
class _EmptyListener(_InstanceLevelDispatch):
|
||||
"""Serves as a proxy interface to the events
|
||||
served by a _ClsLevelDispatch, when there are no
|
||||
instance-level events present.
|
||||
|
||||
Is replaced by _ListenerCollection when instance-level
|
||||
events are added.
|
||||
|
||||
"""
|
||||
|
||||
propagate = frozenset()
|
||||
listeners = ()
|
||||
|
||||
__slots__ = 'parent', 'parent_listeners', 'name'
|
||||
|
||||
def __init__(self, parent, target_cls):
|
||||
if target_cls not in parent._clslevel:
|
||||
parent.update_subclass(target_cls)
|
||||
self.parent = parent # _ClsLevelDispatch
|
||||
self.parent_listeners = parent._clslevel[target_cls]
|
||||
self.name = parent.name
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _EmptyListener at the instance level of
|
||||
a dispatcher, this generates a new
|
||||
_ListenerCollection, applies it to the instance,
|
||||
and returns it.
|
||||
|
||||
"""
|
||||
result = _ListenerCollection(self.parent, obj._instance_cls)
|
||||
if getattr(obj, self.name) is self:
|
||||
setattr(obj, self.name, result)
|
||||
else:
|
||||
assert isinstance(getattr(obj, self.name), _JoinedListener)
|
||||
return result
|
||||
|
||||
def _needs_modify(self, *args, **kw):
|
||||
raise NotImplementedError("need to call for_modify()")
|
||||
|
||||
exec_once = insert = append = remove = clear = _needs_modify
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Execute this event."""
|
||||
|
||||
for fn in self.parent_listeners:
|
||||
fn(*args, **kw)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.parent_listeners)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.parent_listeners)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.parent_listeners)
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
|
||||
class _CompoundListener(_InstanceLevelDispatch):
|
||||
__slots__ = '_exec_once_mutex', '_exec_once'
|
||||
|
||||
def _memoized_attr__exec_once_mutex(self):
|
||||
return threading.Lock()
|
||||
|
||||
def exec_once(self, *args, **kw):
|
||||
"""Execute this event, but only if it has not been
|
||||
executed already for this collection."""
|
||||
|
||||
if not self._exec_once:
|
||||
with self._exec_once_mutex:
|
||||
if not self._exec_once:
|
||||
try:
|
||||
self(*args, **kw)
|
||||
finally:
|
||||
self._exec_once = True
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Execute this event."""
|
||||
|
||||
for fn in self.parent_listeners:
|
||||
fn(*args, **kw)
|
||||
for fn in self.listeners:
|
||||
fn(*args, **kw)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.parent_listeners) + len(self.listeners)
|
||||
|
||||
def __iter__(self):
|
||||
return chain(self.parent_listeners, self.listeners)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.listeners or self.parent_listeners)
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
|
||||
class _ListenerCollection(_CompoundListener):
|
||||
"""Instance-level attributes on instances of :class:`._Dispatch`.
|
||||
|
||||
Represents a collection of listeners.
|
||||
|
||||
As of 0.7.9, _ListenerCollection is only first
|
||||
created via the _EmptyListener.for_modify() method.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
'parent_listeners', 'parent', 'name', 'listeners',
|
||||
'propagate', '__weakref__')
|
||||
|
||||
def __init__(self, parent, target_cls):
|
||||
if target_cls not in parent._clslevel:
|
||||
parent.update_subclass(target_cls)
|
||||
self._exec_once = False
|
||||
self.parent_listeners = parent._clslevel[target_cls]
|
||||
self.parent = parent
|
||||
self.name = parent.name
|
||||
self.listeners = collections.deque()
|
||||
self.propagate = set()
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _ListenerCollection at the instance level of
|
||||
a dispatcher, this returns self.
|
||||
|
||||
"""
|
||||
return self
|
||||
|
||||
def _update(self, other, only_propagate=True):
|
||||
"""Populate from the listeners in another :class:`_Dispatch`
|
||||
object."""
|
||||
|
||||
existing_listeners = self.listeners
|
||||
existing_listener_set = set(existing_listeners)
|
||||
self.propagate.update(other.propagate)
|
||||
other_listeners = [l for l
|
||||
in other.listeners
|
||||
if l not in existing_listener_set
|
||||
and not only_propagate or l in self.propagate
|
||||
]
|
||||
|
||||
existing_listeners.extend(other_listeners)
|
||||
|
||||
to_associate = other.propagate.union(other_listeners)
|
||||
registry._stored_in_collection_multi(self, other, to_associate)
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
if event_key.prepend_to_list(self, self.listeners):
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
if event_key.append_to_list(self, self.listeners):
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
def remove(self, event_key):
|
||||
self.listeners.remove(event_key._listen_fn)
|
||||
self.propagate.discard(event_key._listen_fn)
|
||||
registry._removed_from_collection(event_key, self)
|
||||
|
||||
def clear(self):
|
||||
registry._clear(self, self.listeners)
|
||||
self.propagate.clear()
|
||||
self.listeners.clear()
|
||||
|
||||
|
||||
class _JoinedListener(_CompoundListener):
|
||||
__slots__ = 'parent', 'name', 'local', 'parent_listeners'
|
||||
|
||||
def __init__(self, parent, name, local):
|
||||
self._exec_once = False
|
||||
self.parent = parent
|
||||
self.name = name
|
||||
self.local = local
|
||||
self.parent_listeners = self.local
|
||||
|
||||
@property
|
||||
def listeners(self):
|
||||
return getattr(self.parent, self.name)
|
||||
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
return self.local._adjust_fn_spec(fn, named)
|
||||
|
||||
def for_modify(self, obj):
|
||||
self.local = self.parent_listeners = self.local.for_modify(obj)
|
||||
return self
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
self.local.insert(event_key, propagate)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
self.local.append(event_key, propagate)
|
||||
|
||||
def remove(self, event_key):
|
||||
self.local.remove(event_key)
|
||||
|
||||
def clear(self):
|
||||
raise NotImplementedError()
|
|
@@ -0,0 +1,289 @@
|
|||
# event/base.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Base implementation classes.
|
||||
|
||||
The public-facing ``Events`` serves as the base class for an event interface;
|
||||
its public attributes represent different kinds of events. These attributes
|
||||
are mirrored onto a ``_Dispatch`` class, which serves as a container for
|
||||
collections of listener functions. These collections are represented both
|
||||
at the class level of a particular ``_Dispatch`` class as well as within
|
||||
instances of ``_Dispatch``.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import weakref
|
||||
|
||||
from .. import util
|
||||
from .attr import _JoinedListener, \
|
||||
_EmptyListener, _ClsLevelDispatch
|
||||
|
||||
_registrars = util.defaultdict(list)
|
||||
|
||||
|
||||
def _is_event_name(name):
|
||||
return not name.startswith('_') and name != 'dispatch'
|
||||
|
||||
|
||||
class _UnpickleDispatch(object):
|
||||
"""Serializable callable that re-generates an instance of
|
||||
:class:`_Dispatch` given a particular :class:`.Events` subclass.
|
||||
|
||||
"""
|
||||
|
||||
def __call__(self, _instance_cls):
|
||||
for cls in _instance_cls.__mro__:
|
||||
if 'dispatch' in cls.__dict__:
|
||||
return cls.__dict__['dispatch'].\
|
||||
dispatch_cls._for_class(_instance_cls)
|
||||
else:
|
||||
raise AttributeError("No class with a 'dispatch' member present.")
|
||||
|
||||
|
||||
class _Dispatch(object):
|
||||
"""Mirror the event listening definitions of an Events class with
|
||||
listener collections.
|
||||
|
||||
Classes which define a "dispatch" member will return a
|
||||
non-instantiated :class:`._Dispatch` subclass when the member
|
||||
is accessed at the class level. When the "dispatch" member is
|
||||
accessed at the instance level of its owner, an instance
|
||||
of the :class:`._Dispatch` class is returned.
|
||||
|
||||
A :class:`._Dispatch` class is generated for each :class:`.Events`
|
||||
class defined, by the :func:`._create_dispatcher_class` function.
|
||||
The original :class:`.Events` classes remain untouched.
|
||||
This decouples the construction of :class:`.Events` subclasses from
|
||||
the implementation used by the event internals, and allows
|
||||
inspecting tools like Sphinx to work in an unsurprising
|
||||
way against the public API.
|
||||
|
||||
"""
|
||||
|
||||
# in one ORM edge case, an attribute is added to _Dispatch,
|
||||
# so __dict__ is used in just that case and potentially others.
|
||||
__slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners'
|
||||
|
||||
_empty_listener_reg = weakref.WeakKeyDictionary()
|
||||
|
||||
def __init__(self, parent, instance_cls=None):
|
||||
self._parent = parent
|
||||
self._instance_cls = instance_cls
|
||||
if instance_cls:
|
||||
try:
|
||||
self._empty_listeners = self._empty_listener_reg[instance_cls]
|
||||
except KeyError:
|
||||
self._empty_listeners = \
|
||||
self._empty_listener_reg[instance_cls] = dict(
|
||||
(ls.name, _EmptyListener(ls, instance_cls))
|
||||
for ls in parent._event_descriptors
|
||||
)
|
||||
else:
|
||||
self._empty_listeners = {}
|
||||
|
||||
def __getattr__(self, name):
|
||||
# assign EmptyListeners as attributes on demand
|
||||
# to reduce startup time for new dispatch objects
|
||||
try:
|
||||
ls = self._empty_listeners[name]
|
||||
except KeyError:
|
||||
raise AttributeError(name)
|
||||
else:
|
||||
setattr(self, ls.name, ls)
|
||||
return ls
|
||||
|
||||
@property
|
||||
def _event_descriptors(self):
|
||||
for k in self._event_names:
|
||||
yield getattr(self, k)
|
||||
|
||||
def _for_class(self, instance_cls):
|
||||
return self.__class__(self, instance_cls)
|
||||
|
||||
def _for_instance(self, instance):
|
||||
instance_cls = instance.__class__
|
||||
return self._for_class(instance_cls)
|
||||
|
||||
@property
|
||||
def _listen(self):
|
||||
return self._events._listen
|
||||
|
||||
def _join(self, other):
|
||||
"""Create a 'join' of this :class:`._Dispatch` and another.
|
||||
|
||||
This new dispatcher will dispatch events to both
|
||||
:class:`._Dispatch` objects.
|
||||
|
||||
"""
|
||||
if '_joined_dispatch_cls' not in self.__class__.__dict__:
|
||||
cls = type(
|
||||
"Joined%s" % self.__class__.__name__,
|
||||
(_JoinedDispatcher, ), {'__slots__': self._event_names}
|
||||
)
|
||||
|
||||
self.__class__._joined_dispatch_cls = cls
|
||||
return self._joined_dispatch_cls(self, other)
|
||||
|
||||
def __reduce__(self):
|
||||
return _UnpickleDispatch(), (self._instance_cls, )
|
||||
|
||||
def _update(self, other, only_propagate=True):
|
||||
"""Populate from the listeners in another :class:`_Dispatch`
|
||||
object."""
|
||||
for ls in other._event_descriptors:
|
||||
if isinstance(ls, _EmptyListener):
|
||||
continue
|
||||
getattr(self, ls.name).\
|
||||
for_modify(self)._update(ls, only_propagate=only_propagate)
|
||||
|
||||
def _clear(self):
|
||||
for ls in self._event_descriptors:
|
||||
ls.for_modify(self).clear()
|
||||
|
||||
|
||||
class _EventMeta(type):
|
||||
"""Intercept new Event subclasses and create
|
||||
associated _Dispatch classes."""
|
||||
|
||||
def __init__(cls, classname, bases, dict_):
|
||||
_create_dispatcher_class(cls, classname, bases, dict_)
|
||||
return type.__init__(cls, classname, bases, dict_)
|
||||
|
||||
|
||||
def _create_dispatcher_class(cls, classname, bases, dict_):
|
||||
"""Create a :class:`._Dispatch` class corresponding to an
|
||||
:class:`.Events` class."""
|
||||
|
||||
# there's all kinds of ways to do this,
|
||||
# i.e. make a Dispatch class that shares the '_listen' method
|
||||
# of the Event class, this is the straight monkeypatch.
|
||||
if hasattr(cls, 'dispatch'):
|
||||
dispatch_base = cls.dispatch.__class__
|
||||
else:
|
||||
dispatch_base = _Dispatch
|
||||
|
||||
event_names = [k for k in dict_ if _is_event_name(k)]
|
||||
dispatch_cls = type("%sDispatch" % classname,
|
||||
(dispatch_base, ), {'__slots__': event_names})
|
||||
|
||||
dispatch_cls._event_names = event_names
|
||||
|
||||
dispatch_inst = cls._set_dispatch(cls, dispatch_cls)
|
||||
for k in dispatch_cls._event_names:
|
||||
setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k]))
|
||||
_registrars[k].append(cls)
|
||||
|
||||
for super_ in dispatch_cls.__bases__:
|
||||
if issubclass(super_, _Dispatch) and super_ is not _Dispatch:
|
||||
for ls in super_._events.dispatch._event_descriptors:
|
||||
setattr(dispatch_inst, ls.name, ls)
|
||||
dispatch_cls._event_names.append(ls.name)
|
||||
|
||||
if getattr(cls, '_dispatch_target', None):
|
||||
cls._dispatch_target.dispatch = dispatcher(cls)
|
||||
|
||||
|
||||
def _remove_dispatcher(cls):
|
||||
for k in cls.dispatch._event_names:
|
||||
_registrars[k].remove(cls)
|
||||
if not _registrars[k]:
|
||||
del _registrars[k]
|
||||
|
||||
|
||||
class Events(util.with_metaclass(_EventMeta, object)):
|
||||
"""Define event listening functions for a particular target type."""
|
||||
|
||||
@staticmethod
|
||||
def _set_dispatch(cls, dispatch_cls):
|
||||
# this allows an Events subclass to define additional utility
|
||||
# methods made available to the target via
|
||||
# "self.dispatch._events.<utilitymethod>"
|
||||
# @staticmethod to allow easy "super" calls while in a metaclass
|
||||
# constructor.
|
||||
cls.dispatch = dispatch_cls(None)
|
||||
dispatch_cls._events = cls
|
||||
return cls.dispatch
|
||||
|
||||
@classmethod
|
||||
def _accept_with(cls, target):
|
||||
# Mapper, ClassManager, Session override this to
|
||||
# also accept classes, scoped_sessions, sessionmakers, etc.
|
||||
if hasattr(target, 'dispatch') and (
|
||||
|
||||
isinstance(target.dispatch, cls.dispatch.__class__) or
|
||||
|
||||
|
||||
(
|
||||
isinstance(target.dispatch, type) and
|
||||
isinstance(target.dispatch, cls.dispatch.__class__)
|
||||
) or
|
||||
|
||||
(
|
||||
isinstance(target.dispatch, _JoinedDispatcher) and
|
||||
isinstance(target.dispatch.parent, cls.dispatch.__class__)
|
||||
)
|
||||
|
||||
|
||||
):
|
||||
return target
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _listen(cls, event_key, propagate=False, insert=False, named=False):
|
||||
event_key.base_listen(propagate=propagate, insert=insert, named=named)
|
||||
|
||||
@classmethod
|
||||
def _remove(cls, event_key):
|
||||
event_key.remove()
|
||||
|
||||
@classmethod
|
||||
def _clear(cls):
|
||||
cls.dispatch._clear()
|
||||
|
||||
|
||||
class _JoinedDispatcher(object):
|
||||
"""Represent a connection between two _Dispatch objects."""
|
||||
|
||||
__slots__ = 'local', 'parent', '_instance_cls'
|
||||
|
||||
def __init__(self, local, parent):
|
||||
self.local = local
|
||||
self.parent = parent
|
||||
self._instance_cls = self.local._instance_cls
|
||||
|
||||
def __getattr__(self, name):
|
||||
# assign _JoinedListeners as attributes on demand
|
||||
# to reduce startup time for new dispatch objects
|
||||
ls = getattr(self.local, name)
|
||||
jl = _JoinedListener(self.parent, ls.name, ls)
|
||||
setattr(self, ls.name, jl)
|
||||
return jl
|
||||
|
||||
@property
|
||||
def _listen(self):
|
||||
return self.parent._listen
|
||||
|
||||
|
||||
class dispatcher(object):
|
||||
"""Descriptor used by target classes to
|
||||
deliver the _Dispatch class at the class level
|
||||
and produce new _Dispatch instances for target
|
||||
instances.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, events):
|
||||
self.dispatch_cls = events.dispatch
|
||||
self.events = events
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self.dispatch_cls
|
||||
obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj)
|
||||
return disp
|
|
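The __get__ above plants the per-instance _Dispatch directly into ``obj.__dict__``, so the descriptor fires at most once per instance. The same idiom in isolation, as an illustrative sketch with hypothetical names::

    class cached_attr(object):
        # non-data descriptor: once a value is stored in the instance
        # __dict__ under the same name, ordinary attribute lookup wins
        # and __get__ is never consulted again for that instance
        def __init__(self, factory):
            self.factory = factory
            self.name = factory.__name__

        def __get__(self, obj, cls):
            if obj is None:
                return self
            obj.__dict__[self.name] = value = self.factory(obj)
            return value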
@@ -0,0 +1,169 @@
|
|||
# event/legacy.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Routines to handle adaption of legacy call signatures,
|
||||
generation of deprecation notes and docstrings.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util
|
||||
|
||||
|
||||
def _legacy_signature(since, argnames, converter=None):
|
||||
def leg(fn):
|
||||
if not hasattr(fn, '_legacy_signatures'):
|
||||
fn._legacy_signatures = []
|
||||
fn._legacy_signatures.append((since, argnames, converter))
|
||||
return fn
|
||||
return leg
|
||||
|
||||
|
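A sketch of how this internal decorator is applied to an event method; the event name and converter are hypothetical. The converter receives the current arguments and returns the tuple expected by legacy listeners::

    # 'before_thing' once received (obj,); since 0.9 it receives
    # (obj, extra), so old one-argument listeners are adapted
    @_legacy_signature('0.9', ['obj'], lambda obj, extra: (obj, ))
    def before_thing(self, obj, extra):
        """Intercept 'thing' processing."""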
||||
def _wrap_fn_for_legacy(dispatch_collection, fn, argspec):
|
||||
for since, argnames, conv in dispatch_collection.legacy_signatures:
|
||||
if argnames[-1] == "**kw":
|
||||
has_kw = True
|
||||
argnames = argnames[0:-1]
|
||||
else:
|
||||
has_kw = False
|
||||
|
||||
if len(argnames) == len(argspec.args) \
|
||||
and has_kw is bool(argspec.keywords):
|
||||
|
||||
if conv:
|
||||
assert not has_kw
|
||||
|
||||
def wrap_leg(*args):
|
||||
return fn(*conv(*args))
|
||||
else:
|
||||
def wrap_leg(*args, **kw):
|
||||
argdict = dict(zip(dispatch_collection.arg_names, args))
|
||||
args = [argdict[name] for name in argnames]
|
||||
if has_kw:
|
||||
return fn(*args, **kw)
|
||||
else:
|
||||
return fn(*args)
|
||||
return wrap_leg
|
||||
else:
|
||||
return fn
|
||||
|
||||
|
||||
def _indent(text, indent):
|
||||
return "\n".join(
|
||||
indent + line
|
||||
for line in text.split("\n")
|
||||
)
|
||||
|
||||
|
||||
def _standard_listen_example(dispatch_collection, sample_target, fn):
|
||||
example_kw_arg = _indent(
|
||||
"\n".join(
|
||||
"%(arg)s = kw['%(arg)s']" % {"arg": arg}
|
||||
for arg in dispatch_collection.arg_names[0:2]
|
||||
),
|
||||
" ")
|
||||
if dispatch_collection.legacy_signatures:
|
||||
current_since = max(since for since, args, conv
|
||||
in dispatch_collection.legacy_signatures)
|
||||
else:
|
||||
current_since = None
|
||||
text = (
|
||||
"from sqlalchemy import event\n\n"
|
||||
"# standard decorator style%(current_since)s\n"
|
||||
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
|
||||
"def receive_%(event_name)s("
|
||||
"%(named_event_arguments)s%(has_kw_arguments)s):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"\n # ... (event handling logic) ...\n"
|
||||
)
|
||||
|
||||
if len(dispatch_collection.arg_names) > 3:
|
||||
text += (
|
||||
|
||||
"\n# named argument style (new in 0.9)\n"
|
||||
"@event.listens_for("
|
||||
"%(sample_target)s, '%(event_name)s', named=True)\n"
|
||||
"def receive_%(event_name)s(**kw):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"%(example_kw_arg)s\n"
|
||||
"\n # ... (event handling logic) ...\n"
|
||||
)
|
||||
|
||||
text %= {
|
||||
"current_since": " (arguments as of %s)" %
|
||||
current_since if current_since else "",
|
||||
"event_name": fn.__name__,
|
||||
"has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
|
||||
"named_event_arguments": ", ".join(dispatch_collection.arg_names),
|
||||
"example_kw_arg": example_kw_arg,
|
||||
"sample_target": sample_target
|
||||
}
|
||||
return text
|
||||
|
||||
|
||||
def _legacy_listen_examples(dispatch_collection, sample_target, fn):
|
||||
text = ""
|
||||
for since, args, conv in dispatch_collection.legacy_signatures:
|
||||
text += (
|
||||
"\n# legacy calling style (pre-%(since)s)\n"
|
||||
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
|
||||
"def receive_%(event_name)s("
|
||||
"%(named_event_arguments)s%(has_kw_arguments)s):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"\n # ... (event handling logic) ...\n" % {
|
||||
"since": since,
|
||||
"event_name": fn.__name__,
|
||||
"has_kw_arguments": " **kw"
|
||||
if dispatch_collection.has_kw else "",
|
||||
"named_event_arguments": ", ".join(args),
|
||||
"sample_target": sample_target
|
||||
}
|
||||
)
|
||||
return text
|
||||
|
||||
|
||||
def _version_signature_changes(dispatch_collection):
|
||||
since, args, conv = dispatch_collection.legacy_signatures[0]
|
||||
return (
|
||||
"\n.. versionchanged:: %(since)s\n"
|
||||
" The ``%(event_name)s`` event now accepts the \n"
|
||||
" arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
|
||||
" Listener functions which accept the previous argument \n"
|
||||
" signature(s) listed above will be automatically \n"
|
||||
" adapted to the new signature." % {
|
||||
"since": since,
|
||||
"event_name": dispatch_collection.name,
|
||||
"named_event_arguments": ", ".join(dispatch_collection.arg_names),
|
||||
"has_kw_arguments": ", **kw" if dispatch_collection.has_kw else ""
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn):
|
||||
header = ".. container:: event_signatures\n\n"\
|
||||
" Example argument forms::\n"\
|
||||
"\n"
|
||||
|
||||
sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
|
||||
text = (
|
||||
header +
|
||||
_indent(
|
||||
_standard_listen_example(
|
||||
dispatch_collection, sample_target, fn),
|
||||
" " * 8)
|
||||
)
|
||||
if dispatch_collection.legacy_signatures:
|
||||
text += _indent(
|
||||
_legacy_listen_examples(
|
||||
dispatch_collection, sample_target, fn),
|
||||
" " * 8)
|
||||
|
||||
text += _version_signature_changes(dispatch_collection)
|
||||
|
||||
return util.inject_docstring_text(fn.__doc__,
|
||||
text,
|
||||
1
|
||||
)
|
|
@@ -0,0 +1,262 @@
|
|||
# event/registry.py
|
||||
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides managed registration services on behalf of :func:`.listen`
|
||||
arguments.
|
||||
|
||||
By "managed registration", we mean that event listening functions and
|
||||
other objects can be added to various collections in such a way that their
|
||||
membership in all those collections can be revoked at once, based on
|
||||
an equivalent :class:`._EventKey`.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import weakref
|
||||
import collections
|
||||
import types
|
||||
from .. import exc, util
|
||||
|
||||
|
||||
_key_to_collection = collections.defaultdict(dict)
|
||||
"""
|
||||
Given an original listen() argument, can locate all
|
||||
listener collections and the listener fn contained
|
||||
|
||||
(target, identifier, fn) -> {
|
||||
ref(listenercollection) -> ref(listener_fn)
|
||||
ref(listenercollection) -> ref(listener_fn)
|
||||
ref(listenercollection) -> ref(listener_fn)
|
||||
}
|
||||
"""
|
||||
|
||||
_collection_to_key = collections.defaultdict(dict)
|
||||
"""
|
||||
Given a _ListenerCollection or _ClsLevelListener, can locate
|
||||
all the original listen() arguments and the listener fn contained
|
||||
|
||||
ref(listenercollection) -> {
|
||||
ref(listener_fn) -> (target, identifier, fn),
|
||||
ref(listener_fn) -> (target, identifier, fn),
|
||||
ref(listener_fn) -> (target, identifier, fn),
|
||||
}
|
||||
"""
|
||||
|
||||
|
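In public-API terms the equivalence works like this: remove() rebuilds the same (target, identifier, fn) key and revokes every stored reference at once, e.g.::

    from sqlalchemy import event
    from sqlalchemy.pool import Pool

    def on_connect(dbapi_conn, connection_record):
        pass

    event.listen(Pool, 'connect', on_connect)

    # the same triple produces an equivalent _EventKey, so every
    # collection holding the (possibly wrapped) listener is located
    # and cleaned up in one step
    event.remove(Pool, 'connect', on_connect)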
||||
def _collection_gced(ref):
|
||||
# defaultdict, so can't get a KeyError
|
||||
if not _collection_to_key or ref not in _collection_to_key:
|
||||
return
|
||||
listener_to_key = _collection_to_key.pop(ref)
|
||||
for key in listener_to_key.values():
|
||||
if key in _key_to_collection:
|
||||
# defaultdict, so can't get a KeyError
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
dispatch_reg.pop(ref)
|
||||
if not dispatch_reg:
|
||||
_key_to_collection.pop(key)
|
||||
|
||||
|
||||
def _stored_in_collection(event_key, owner):
|
||||
key = event_key._key
|
||||
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
|
||||
owner_ref = owner.ref
|
||||
listen_ref = weakref.ref(event_key._listen_fn)
|
||||
|
||||
if owner_ref in dispatch_reg:
|
||||
return False
|
||||
|
||||
dispatch_reg[owner_ref] = listen_ref
|
||||
|
||||
listener_to_key = _collection_to_key[owner_ref]
|
||||
listener_to_key[listen_ref] = key
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _removed_from_collection(event_key, owner):
|
||||
key = event_key._key
|
||||
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
|
||||
listen_ref = weakref.ref(event_key._listen_fn)
|
||||
|
||||
owner_ref = owner.ref
|
||||
dispatch_reg.pop(owner_ref, None)
|
||||
if not dispatch_reg:
|
||||
del _key_to_collection[key]
|
||||
|
||||
if owner_ref in _collection_to_key:
|
||||
listener_to_key = _collection_to_key[owner_ref]
|
||||
listener_to_key.pop(listen_ref)
|
||||
|
||||
|
||||
def _stored_in_collection_multi(newowner, oldowner, elements):
|
||||
if not elements:
|
||||
return
|
||||
|
||||
oldowner = oldowner.ref
|
||||
newowner = newowner.ref
|
||||
|
||||
old_listener_to_key = _collection_to_key[oldowner]
|
||||
new_listener_to_key = _collection_to_key[newowner]
|
||||
|
||||
for listen_fn in elements:
|
||||
listen_ref = weakref.ref(listen_fn)
|
||||
key = old_listener_to_key[listen_ref]
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
if newowner in dispatch_reg:
|
||||
assert dispatch_reg[newowner] == listen_ref
|
||||
else:
|
||||
dispatch_reg[newowner] = listen_ref
|
||||
|
||||
new_listener_to_key[listen_ref] = key
|
||||
|
||||
|
||||
def _clear(owner, elements):
|
||||
if not elements:
|
||||
return
|
||||
|
||||
owner = owner.ref
|
||||
listener_to_key = _collection_to_key[owner]
|
||||
for listen_fn in elements:
|
||||
listen_ref = weakref.ref(listen_fn)
|
||||
key = listener_to_key[listen_ref]
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
    dispatch_reg.pop(owner, None)

    if not dispatch_reg:
        del _key_to_collection[key]


class _EventKey(object):
    """Represent :func:`.listen` arguments.
    """

    __slots__ = (
        'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target'
    )

    def __init__(self, target, identifier,
                 fn, dispatch_target, _fn_wrap=None):
        self.target = target
        self.identifier = identifier
        self.fn = fn
        if isinstance(fn, types.MethodType):
            self.fn_key = id(fn.__func__), id(fn.__self__)
        else:
            self.fn_key = id(fn)
        self.fn_wrap = _fn_wrap
        self.dispatch_target = dispatch_target

    @property
    def _key(self):
        return (id(self.target), self.identifier, self.fn_key)

    def with_wrapper(self, fn_wrap):
        if fn_wrap is self._listen_fn:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                self.dispatch_target,
                _fn_wrap=fn_wrap
            )

    def with_dispatch_target(self, dispatch_target):
        if dispatch_target is self.dispatch_target:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                dispatch_target,
                _fn_wrap=self.fn_wrap
            )

    def listen(self, *args, **kw):
        once = kw.pop("once", False)
        named = kw.pop("named", False)

        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named)

        self = self.with_wrapper(adjusted_fn)

        if once:
            self.with_wrapper(
                util.only_once(self._listen_fn)).listen(*args, **kw)
        else:
            self.dispatch_target.dispatch._listen(self, *args, **kw)

    def remove(self):
        key = self._key

        if key not in _key_to_collection:
            raise exc.InvalidRequestError(
                "No listeners found for event %s / %r / %s " %
                (self.target, self.identifier, self.fn)
            )
        dispatch_reg = _key_to_collection.pop(key)

        for collection_ref, listener_ref in dispatch_reg.items():
            collection = collection_ref()
            listener_fn = listener_ref()
            if collection is not None and listener_fn is not None:
                collection.remove(self.with_wrapper(listener_fn))

    def contains(self):
        """Return True if this event key is registered to listen.
        """
        return self._key in _key_to_collection

    def base_listen(self, propagate=False, insert=False,
                    named=False):

        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        if insert:
            dispatch_collection.\
                for_modify(target.dispatch).insert(self, propagate)
        else:
            dispatch_collection.\
                for_modify(target.dispatch).append(self, propagate)

    @property
    def _listen_fn(self):
        return self.fn_wrap or self.fn

    def append_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.append(self._listen_fn)
            return True
        else:
            return False

    def remove_from_list(self, owner, list_):
        _removed_from_collection(self, owner)
        list_.remove(self._listen_fn)

    def prepend_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.appendleft(self._listen_fn)
            return True
        else:
            return False
File diff suppressed because it is too large

@@ -0,0 +1,480 @@
# sqlalchemy/exc.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Exceptions used with SQLAlchemy.

The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are
raised as a result of DBAPI exceptions are all subclasses of
:exc:`.DBAPIError`.

"""

from .util import compat


class SQLAlchemyError(Exception):
    """Generic error class."""

    code = None

    def __init__(self, *arg, **kw):
        code = kw.pop('code', None)
        if code is not None:
            self.code = code
        super(SQLAlchemyError, self).__init__(*arg, **kw)

    def _code_str(self):
        if not self.code:
            return ""
        else:
            return (
                "(Background on this error at: "
                "http://sqlalche.me/e/%s)" % (self.code, )
            )

    def _message(self):
        # get string representation just like Exception.__str__(self),
        # but also support if the string has non-ascii chars
        if len(self.args) == 1:
            return compat.text_type(self.args[0])
        else:
            return compat.text_type(self.args)

    def __str__(self):
        message = self._message()

        if self.code:
            message = (
                "%s %s" % (message, self._code_str())
            )

        return message

    def __unicode__(self):
        return self.__str__()


class ArgumentError(SQLAlchemyError):
    """Raised when an invalid or conflicting function argument is supplied.

    This error generally corresponds to construction time state errors.

    """


class ObjectNotExecutableError(ArgumentError):
    """Raised when an object is passed to .execute() that can't be
    executed as SQL.

    .. versionadded:: 1.1

    """

    def __init__(self, target):
        super(ObjectNotExecutableError, self).__init__(
            "Not an executable object: %r" % target
        )


class NoSuchModuleError(ArgumentError):
    """Raised when a dynamically-loaded module (usually a database dialect)
    of a particular name cannot be located."""


class NoForeignKeysError(ArgumentError):
    """Raised when no foreign keys can be located between two selectables
    during a join."""


class AmbiguousForeignKeysError(ArgumentError):
    """Raised when more than one foreign key matching can be located
    between two selectables during a join."""


class CircularDependencyError(SQLAlchemyError):
    """Raised by topological sorts when a circular dependency is detected.

    There are two scenarios where this error occurs:

    * In a Session flush operation, if two objects are mutually dependent
      on each other, they can not be inserted or deleted via INSERT or
      DELETE statements alone; an UPDATE will be needed to post-associate
      or pre-deassociate one of the foreign key constrained values.
      The ``post_update`` flag described at :ref:`post_update` can resolve
      this cycle.
    * In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey`
      or :class:`.ForeignKeyConstraint` objects mutually refer to each
      other. Apply the ``use_alter=True`` flag to one or both,
      see :ref:`use_alter`.

    """
    def __init__(self, message, cycles, edges, msg=None, code=None):
        if msg is None:
            message += " (%s)" % ", ".join(repr(s) for s in cycles)
        else:
            message = msg
        SQLAlchemyError.__init__(self, message, code=code)
        self.cycles = cycles
        self.edges = edges

    def __reduce__(self):
        return self.__class__, (None, self.cycles,
                                self.edges, self.args[0])


class CompileError(SQLAlchemyError):
    """Raised when an error occurs during SQL compilation"""


class UnsupportedCompilationError(CompileError):
    """Raised when an operation is not supported by the given compiler.


    .. versionadded:: 0.8.3

    """

    def __init__(self, compiler, element_type):
        super(UnsupportedCompilationError, self).__init__(
            "Compiler %r can't render element of type %s" %
            (compiler, element_type))


class IdentifierError(SQLAlchemyError):
    """Raised when a schema name is beyond the max character limit"""


class DisconnectionError(SQLAlchemyError):
    """A disconnect is detected on a raw DB-API connection.

    This error is raised and consumed internally by a connection pool. It can
    be raised by the :meth:`.PoolEvents.checkout` event so that the host pool
    forces a retry; the exception will be caught three times in a row before
    the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError`
    regarding the connection attempt.

    """
    invalidate_pool = False


class InvalidatePoolError(DisconnectionError):
    """Raised when the connection pool should invalidate all stale connections.

    A subclass of :class:`.DisconnectionError` that indicates that the
    disconnect situation encountered on the connection probably means the
    entire pool should be invalidated, as the database has been restarted.

    This exception will be handled otherwise the same way as
    :class:`.DisconnectionError`, allowing three attempts to reconnect
    before giving up.

    .. versionadded:: 1.2

    """
    invalidate_pool = True


class TimeoutError(SQLAlchemyError):
    """Raised when a connection pool times out on getting a connection."""


class InvalidRequestError(SQLAlchemyError):
    """SQLAlchemy was asked to do something it can't do.

    This error generally corresponds to runtime state errors.

    """


class NoInspectionAvailable(InvalidRequestError):
    """A subject passed to :func:`sqlalchemy.inspection.inspect` produced
    no context for inspection."""


class ResourceClosedError(InvalidRequestError):
    """An operation was requested from a connection, cursor, or other
    object that's in a closed state."""


class NoSuchColumnError(KeyError, InvalidRequestError):
    """A nonexistent column is requested from a ``RowProxy``."""


class NoReferenceError(InvalidRequestError):
    """Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""


class NoReferencedTableError(NoReferenceError):
    """Raised by ``ForeignKey`` when the referred ``Table`` cannot be
    located.

    """
    def __init__(self, message, tname):
        NoReferenceError.__init__(self, message)
        self.table_name = tname

    def __reduce__(self):
        return self.__class__, (self.args[0], self.table_name)


class NoReferencedColumnError(NoReferenceError):
    """Raised by ``ForeignKey`` when the referred ``Column`` cannot be
    located.

    """
    def __init__(self, message, tname, cname):
        NoReferenceError.__init__(self, message)
        self.table_name = tname
        self.column_name = cname

    def __reduce__(self):
        return self.__class__, (self.args[0], self.table_name,
                                self.column_name)


class NoSuchTableError(InvalidRequestError):
    """Table does not exist or is not visible to a connection."""


class UnreflectableTableError(InvalidRequestError):
    """Table exists but can't be reflected for some reason.

    .. versionadded:: 1.2

    """


class UnboundExecutionError(InvalidRequestError):
    """SQL was attempted without a database connection to execute it on."""


class DontWrapMixin(object):
    """A mixin class which, when applied to a user-defined Exception class,
    will not be wrapped inside of :exc:`.StatementError` if the error is
    emitted within the process of executing a statement.

    E.g.::

        from sqlalchemy.exc import DontWrapMixin

        class MyCustomException(Exception, DontWrapMixin):
            pass

        class MySpecialType(TypeDecorator):
            impl = String

            def process_bind_param(self, value, dialect):
                if value == 'invalid':
                    raise MyCustomException("invalid!")

    """

# Moved to orm.exc; compatibility definition installed by orm import until 0.6
UnmappedColumnError = None


class StatementError(SQLAlchemyError):
    """An error occurred during execution of a SQL statement.

    :class:`StatementError` wraps the exception raised
    during execution, and features :attr:`.statement`
    and :attr:`.params` attributes which supply context regarding
    the specifics of the statement which had an issue.

    The wrapped exception object is available in
    the :attr:`.orig` attribute.

    """

    statement = None
    """The string SQL statement being invoked when this exception occurred."""

    params = None
    """The parameter list being used when this exception occurred."""

    orig = None
    """The DBAPI exception object."""

    def __init__(self, message, statement, params, orig, code=None):
        SQLAlchemyError.__init__(self, message, code=code)
        self.statement = statement
        self.params = params
        self.orig = orig
        self.detail = []

    def add_detail(self, msg):
        self.detail.append(msg)

    def __reduce__(self):
        return self.__class__, (self.args[0], self.statement,
                                self.params, self.orig)

    def __str__(self):
        from sqlalchemy.sql import util

        details = [self._message()]
        if self.statement:
            details.append("[SQL: %r]" % self.statement)
            if self.params:
                params_repr = util._repr_params(self.params, 10)
                details.append("[parameters: %r]" % params_repr)
        code_str = self._code_str()
        if code_str:
            details.append(code_str)
        return ' '.join([
            "(%s)" % det for det in self.detail
        ] + details)


class DBAPIError(StatementError):
    """Raised when the execution of a database operation fails.

    Wraps exceptions raised by the DB-API underlying the
    database operation. Driver-specific implementations of the standard
    DB-API exception types are wrapped by matching sub-types of SQLAlchemy's
    :class:`DBAPIError` when possible. DB-API's ``Error`` type maps to
    :class:`DBAPIError` in SQLAlchemy, otherwise the names are identical. Note
    that there is no guarantee that different DB-API implementations will
    raise the same exception type for any given error condition.

    :class:`DBAPIError` features :attr:`~.StatementError.statement`
    and :attr:`~.StatementError.params` attributes which supply context
    regarding the specifics of the statement which had an issue, for the
    typical case when the error was raised within the context of
    emitting a SQL statement.

    The wrapped exception object is available in the
    :attr:`~.StatementError.orig` attribute. Its type and properties are
    DB-API implementation specific.

    """

    code = 'dbapi'

    @classmethod
    def instance(cls, statement, params,
                 orig, dbapi_base_err,
                 connection_invalidated=False,
                 dialect=None):
        # Don't ever wrap these, just return them directly as if
        # DBAPIError didn't exist.
        if (isinstance(orig, BaseException) and
                not isinstance(orig, Exception)) or \
                isinstance(orig, DontWrapMixin):
            return orig

        if orig is not None:
            # not a DBAPI error, statement is present.
            # raise a StatementError
            if isinstance(orig, SQLAlchemyError) and statement:
                return StatementError(
                    "(%s.%s) %s" %
                    (orig.__class__.__module__, orig.__class__.__name__,
                     orig.args[0]),
                    statement, params, orig, code=orig.code
                )
            elif not isinstance(orig, dbapi_base_err) and statement:
                return StatementError(
                    "(%s.%s) %s" %
                    (orig.__class__.__module__, orig.__class__.__name__,
                     orig),
                    statement, params, orig
                )

            glob = globals()
            for super_ in orig.__class__.__mro__:
                name = super_.__name__
                if dialect:
                    name = dialect.dbapi_exception_translation_map.get(
                        name, name)
                if name in glob and issubclass(glob[name], DBAPIError):
                    cls = glob[name]
                    break

        return cls(statement, params, orig, connection_invalidated,
                   code=cls.code)

    def __reduce__(self):
        return self.__class__, (self.statement, self.params,
                                self.orig, self.connection_invalidated)

    def __init__(self, statement, params, orig, connection_invalidated=False,
                 code=None):
        try:
            text = str(orig)
        except Exception as e:
            text = 'Error in str() of DB-API-generated exception: ' + str(e)
        StatementError.__init__(
            self,
            '(%s.%s) %s' % (
                orig.__class__.__module__, orig.__class__.__name__, text, ),
            statement,
            params,
            orig, code=code
        )
        self.connection_invalidated = connection_invalidated


class InterfaceError(DBAPIError):
    """Wraps a DB-API InterfaceError."""

    code = "rvf5"


class DatabaseError(DBAPIError):
    """Wraps a DB-API DatabaseError."""

    code = "4xp6"


class DataError(DatabaseError):
    """Wraps a DB-API DataError."""

    code = "9h9h"


class OperationalError(DatabaseError):
    """Wraps a DB-API OperationalError."""

    code = "e3q8"


class IntegrityError(DatabaseError):
    """Wraps a DB-API IntegrityError."""

    code = "gkpj"


class InternalError(DatabaseError):
    """Wraps a DB-API InternalError."""

    code = "2j85"


class ProgrammingError(DatabaseError):
    """Wraps a DB-API ProgrammingError."""

    code = "f405"


class NotSupportedError(DatabaseError):
    """Wraps a DB-API NotSupportedError."""

    code = "tw8g"

# Warnings

class SADeprecationWarning(DeprecationWarning):
    """Issued once per usage of a deprecated API."""


class SAPendingDeprecationWarning(PendingDeprecationWarning):
    """Issued once per usage of a deprecated API."""


class SAWarning(RuntimeWarning):
    """Issued at runtime."""
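
Since this commit vendors `sqlalchemy/exc.py` wholesale, a short sketch of
how the hierarchy surfaces in application code may help; the engine URL and
table are illustrative assumptions, not part of the diff:

    # Assumed usage sketch against SQLAlchemy 1.x string execution.
    from sqlalchemy import create_engine
    from sqlalchemy.exc import IntegrityError

    engine = create_engine('sqlite:///:memory:')
    conn = engine.connect()
    conn.execute('CREATE TABLE t (id INTEGER PRIMARY KEY)')

    try:
        conn.execute('INSERT INTO t (id) VALUES (1)')
        conn.execute('INSERT INTO t (id) VALUES (1)')  # duplicate key
    except IntegrityError as err:
        # err.orig is the raw DBAPI exception; err.statement and
        # err.params carry the SQL context, and str(err) ends with the
        # code link, e.g. '(Background on this error at:
        # http://sqlalche.me/e/gkpj)'.
        print(err.orig)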
@@ -0,0 +1,11 @@
# ext/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .. import util as _sa_util

_sa_util.dependencies.resolve_all("sqlalchemy.ext")
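
`ext/__init__.py` above only resolves lazy cross-module dependencies; the
extension packages themselves are what applications import. For orientation,
here is a minimal sketch of `sqlalchemy.ext.declarative`, the extension that
declarative models (including Flask-SQLAlchemy's) build on; the table is a
hypothetical example, not part of this commit:

    # Illustrative sketch only; the Device table is an assumption.
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Device(Base):
        __tablename__ = 'devices'
        id = Column(Integer, primary_key=True)
        name = Column(String(64))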
File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff