Mirror of https://github.com/moparisthebest/SickRage (synced 2025-03-03 01:52:02 -05:00)

Downgraded SQLAlchemy to version 0.8 from 0.9 for compatibility reasons

This commit is contained in:
parent 246708f600
commit 831507cb47
@@ -23,7 +23,7 @@ http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
try:
    from sqlalchemy import MetaData, Table, Column, String, Binary, select
    from shove import BaseStore, DbBase
except ImportError:
except ImportError, e:
    raise ImportError('Requires SQLAlchemy >= 0.4')
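Note on the hunk above: "except ImportError, e:" is the Python 2-only comma form of the except statement; Python 3 accepts only the "as" form. A minimal side-by-side sketch (plain Python, module name hypothetical):

    # Python 2-only spelling, as restored by this commit:
    try:
        import missing_module            # hypothetical module name
    except ImportError, e:               # binds the exception to e; SyntaxError on Python 3
        raise ImportError('Requires SQLAlchemy >= 0.4')

    # version-neutral spelling (Python 2.6+ and 3.x):
    try:
        import missing_module
    except ImportError as e:
        raise ImportError('Requires SQLAlchemy >= 0.4')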
@@ -1,32 +1,128 @@
# testing/__init__.py
# sqlalchemy/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import inspect as _inspect
import sys

from .warnings import testing_warn, assert_warnings, resetwarnings
from .sql import (
    alias,
    and_,
    asc,
    between,
    bindparam,
    case,
    cast,
    collate,
    delete,
    desc,
    distinct,
    except_,
    except_all,
    exists,
    extract,
    func,
    insert,
    intersect,
    intersect_all,
    join,
    literal,
    literal_column,
    modifier,
    not_,
    null,
    or_,
    outerjoin,
    outparam,
    over,
    select,
    subquery,
    text,
    tuple_,
    type_coerce,
    union,
    union_all,
    update,
    )

from . import config

from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\
    fails_on, fails_on_everything_except, skip, only_on, exclude, \
    against as _against, _server_version, only_if
from .types import (
    BIGINT,
    BINARY,
    BLOB,
    BOOLEAN,
    BigInteger,
    Binary,
    Boolean,
    CHAR,
    CLOB,
    DATE,
    DATETIME,
    DECIMAL,
    Date,
    DateTime,
    Enum,
    FLOAT,
    Float,
    INT,
    INTEGER,
    Integer,
    Interval,
    LargeBinary,
    NCHAR,
    NVARCHAR,
    NUMERIC,
    Numeric,
    PickleType,
    REAL,
    SMALLINT,
    SmallInteger,
    String,
    TEXT,
    TIME,
    TIMESTAMP,
    Text,
    Time,
    TypeDecorator,
    Unicode,
    UnicodeText,
    VARBINARY,
    VARCHAR,
    )


def against(*queries):
    return _against(config._current, *queries)
from .schema import (
    CheckConstraint,
    Column,
    ColumnDefault,
    Constraint,
    DDL,
    DefaultClause,
    FetchedValue,
    ForeignKey,
    ForeignKeyConstraint,
    Index,
    MetaData,
    PassiveDefault,
    PrimaryKeyConstraint,
    Sequence,
    Table,
    ThreadLocalMetaData,
    UniqueConstraint,
    )

from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
    eq_, ne_, is_, is_not_, startswith_, assert_raises, \
    assert_raises_message, AssertsCompiledSQL, ComparesTables, \
    AssertsExecutionResults, expect_deprecated
from .inspection import inspect

from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict
from .engine import create_engine, engine_from_config

crashes = skip

from .config import db
from .config import requirements as requires
__all__ = sorted(name for name, obj in locals().items()
                 if not (name.startswith('_') or _inspect.ismodule(obj)))

from . import mock
__version__ = '0.8.6'

del _inspect, sys

from . import util as _sa_util
_sa_util.importlater.resolve_all()
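The __all__ computation near the end of the hunk exports every public, non-module name present in the module namespace at that point. A standalone sketch of the same idiom (the names here are illustrative):

    import inspect as _inspect

    select = object()      # stand-in for a name pulled in by the imports above
    _private = object()    # filtered out: leading underscore

    __all__ = sorted(name for name, obj in locals().items()
                     if not (name.startswith('_') or _inspect.ismodule(obj)))
    # __all__ == ['select']: underscore names and imported modules are skipped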
@@ -1,7 +1,7 @@
/*
processors.c
Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com

This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,15 +10,13 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
#include <datetime.h>

#define MODULE_NAME "cprocessors"
#define MODULE_DOC "Module containing C versions of data processing functions."

#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#endif


static PyObject *
int_to_boolean(PyObject *self, PyObject *arg)
{
@@ -28,12 +26,7 @@ int_to_boolean(PyObject *self, PyObject *arg)
    if (arg == Py_None)
        Py_RETURN_NONE;


#if PY_MAJOR_VERSION >= 3
    l = PyLong_AsLong(arg);
#else
    l = PyInt_AsLong(arg);
#endif
    if (l == 0) {
        res = Py_False;
    } else if (l == 1) {
@@ -72,48 +65,23 @@ to_float(PyObject *self, PyObject *arg)
static PyObject *
str_to_datetime(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int year, month, day, hour, minute, second, microsecond = 0;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse datetime string '%.200s' "
            "- value is not a string.",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse datetime string '%.200s' "
            "- value is not a string.",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
@@ -124,30 +92,15 @@ str_to_datetime(PyObject *self, PyObject *arg)
    not accept "2000-01-01 00:00:00.". I don't know which is better, but they
    should be coherent.
    */
    numparsed = sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
                       &hour, &minute, &second, &microsecond);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed < 6) {
    if (sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
               &hour, &minute, &second, &microsecond) < 6) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse datetime string: %.200s",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse datetime string: %.200s",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
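For context, the C routine above is the compiled counterpart of SQLAlchemy's pure-Python result processor (the fallback lives in sqlalchemy/processors.py when the extension is not built). A rough Python sketch of the same parse, assuming the "YYYY-MM-DD HH:MM:SS[.ffffff]" input shape the sscanf pattern expects:

    import datetime
    import re

    _DT = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")

    def str_to_datetime(value):
        # mirrors the C version: None passes through, bad input raises ValueError
        if value is None:
            return None
        m = _DT.match(value)
        if m is None:
            raise ValueError("Couldn't parse datetime string: %r" % value)
        year, month, day, hour, minute, second = map(int, m.group(1, 2, 3, 4, 5, 6))
        microsecond = int((m.group(7) or "0").ljust(6, "0")[:6])
        return datetime.datetime(year, month, day, hour, minute, second, microsecond)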
@@ -158,47 +111,22 @@ str_to_datetime(PyObject *self, PyObject *arg)
static PyObject *
str_to_time(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int hour, minute, second, microsecond = 0;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;

#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse time string '%.200s' - value is not a string.",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse time string '%.200s' - value is not a string.",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
@@ -209,30 +137,15 @@ str_to_time(PyObject *self, PyObject *arg)
    not accept "00:00:00.". I don't know which is better, but they should be
    coherent.
    */
    numparsed = sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
                       &microsecond);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed < 3) {
    if (sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
               &microsecond) < 3) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse time string: %.200s",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse time string: %.200s",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
@@ -242,73 +155,34 @@ str_to_time(PyObject *self, PyObject *arg)
static PyObject *
str_to_date(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int year, month, day;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse date string '%.200s' - value is not a string.",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse date string '%.200s' - value is not a string.",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }

    numparsed = sscanf(str, "%4u-%2u-%2u", &year, &month, &day);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed != 3) {
    if (sscanf(str, "%4u-%2u-%2u", &year, &month, &day) != 3) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse date string: %.200s",
            PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
            PyExc_ValueError,
            "Couldn't parse date string: %.200s",
            PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
@@ -345,35 +219,17 @@ UnicodeResultProcessor_init(UnicodeResultProcessor *self, PyObject *args,
    PyObject *encoding, *errors = NULL;
    static char *kwlist[] = {"encoding", "errors", NULL};

#if PY_MAJOR_VERSION >= 3
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "U|U:__init__", kwlist,
                                     &encoding, &errors))
        return -1;
#else
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "S|S:__init__", kwlist,
                                     &encoding, &errors))
        return -1;
#endif

#if PY_MAJOR_VERSION >= 3
    encoding = PyUnicode_AsASCIIString(encoding);
#else
    Py_INCREF(encoding);
#endif
    self->encoding = encoding;

    if (errors) {
#if PY_MAJOR_VERSION >= 3
        errors = PyUnicode_AsASCIIString(errors);
#else
        Py_INCREF(errors);
#endif
    } else {
#if PY_MAJOR_VERSION >= 3
        errors = PyBytes_FromString("strict");
#else
        errors = PyString_FromString("strict");
#endif
        if (errors == NULL)
            return -1;
    }
@@ -392,58 +248,11 @@ UnicodeResultProcessor_process(UnicodeResultProcessor *self, PyObject *value)
    if (value == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    if (PyBytes_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyBytes_AS_STRING(self->encoding);
    errors = PyBytes_AS_STRING(self->errors);
#else
    if (PyString_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyString_AS_STRING(self->encoding);
    errors = PyString_AS_STRING(self->errors);
#endif

    return PyUnicode_Decode(str, len, encoding, errors);
}

static PyObject *
UnicodeResultProcessor_conditional_process(UnicodeResultProcessor *self, PyObject *value)
{
    const char *encoding, *errors;
    char *str;
    Py_ssize_t len;

    if (value == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    if (PyUnicode_Check(value) == 1) {
        Py_INCREF(value);
        return value;
    }

    if (PyBytes_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyBytes_AS_STRING(self->encoding);
    errors = PyBytes_AS_STRING(self->errors);
#else

    if (PyUnicode_Check(value) == 1) {
        Py_INCREF(value);
        return value;
    }

    if (PyString_AsStringAndSize(value, &str, &len))
        return NULL;


    encoding = PyString_AS_STRING(self->encoding);
    errors = PyString_AS_STRING(self->errors);
#endif

    return PyUnicode_Decode(str, len, encoding, errors);
}
@@ -453,23 +262,18 @@ UnicodeResultProcessor_dealloc(UnicodeResultProcessor *self)
{
    Py_XDECREF(self->encoding);
    Py_XDECREF(self->errors);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject*)self);
#else
    self->ob_type->tp_free((PyObject*)self);
#endif
}

static PyMethodDef UnicodeResultProcessor_methods[] = {
    {"process", (PyCFunction)UnicodeResultProcessor_process, METH_O,
     "The value processor itself."},
    {"conditional_process", (PyCFunction)UnicodeResultProcessor_conditional_process, METH_O,
     "Conditional version of the value processor."},
    {NULL}  /* Sentinel */
};

static PyTypeObject UnicodeResultProcessorType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    PyObject_HEAD_INIT(NULL)
    0,                                                  /* ob_size */
    "sqlalchemy.cprocessors.UnicodeResultProcessor",    /* tp_name */
    sizeof(UnicodeResultProcessor),                     /* tp_basicsize */
    0,                                                  /* tp_itemsize */
@@ -519,11 +323,7 @@ DecimalResultProcessor_init(DecimalResultProcessor *self, PyObject *args,
{
    PyObject *type, *format;

#if PY_MAJOR_VERSION >= 3
    if (!PyArg_ParseTuple(args, "OU", &type, &format))
#else
    if (!PyArg_ParseTuple(args, "OS", &type, &format))
#endif
        return -1;

    Py_INCREF(type);
@@ -543,21 +343,11 @@ DecimalResultProcessor_process(DecimalResultProcessor *self, PyObject *value)
    if (value == Py_None)
        Py_RETURN_NONE;

    /* Decimal does not accept float values directly */
    /* SQLite can also give us an integer here (see [ticket:2432]) */
    /* XXX: starting with Python 3.1, we could use Decimal.from_float(f),
       but the result wouldn't be the same */

    args = PyTuple_Pack(1, value);
    if (args == NULL)
        return NULL;

#if PY_MAJOR_VERSION >= 3
    str = PyUnicode_Format(self->format, args);
#else
    str = PyString_Format(self->format, args);
#endif

    Py_DECREF(args);
    if (str == NULL)
        return NULL;
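The format-then-construct sequence above exists because, on the interpreters this code targets, decimal.Decimal would not accept a float directly, and Decimal.from_float() preserves the full binary expansion rather than the rounded text form. A quick standard-library demonstration:

    from decimal import Decimal

    Decimal("%.10f" % 1.1)      # Decimal('1.1000000000'), the rounded text form the C code builds
    Decimal.from_float(1.1)     # Decimal('1.100000000000000088817841970012523233890533447265625')
    # on older interpreters Decimal(1.1) raised TypeError, hence the string round-trip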
@@ -572,11 +362,7 @@ DecimalResultProcessor_dealloc(DecimalResultProcessor *self)
{
    Py_XDECREF(self->type);
    Py_XDECREF(self->format);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject*)self);
#else
    self->ob_type->tp_free((PyObject*)self);
#endif
}

static PyMethodDef DecimalResultProcessor_methods[] = {
@@ -586,7 +372,8 @@ static PyMethodDef DecimalResultProcessor_methods[] = {
};

static PyTypeObject DecimalResultProcessorType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    PyObject_HEAD_INIT(NULL)
    0,                                          /* ob_size */
    "sqlalchemy.DecimalResultProcessor",        /* tp_name */
    sizeof(DecimalResultProcessor),             /* tp_basicsize */
    0,                                          /* tp_itemsize */
@@ -626,6 +413,11 @@ static PyTypeObject DecimalResultProcessorType = {
    0,                                          /* tp_new */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


static PyMethodDef module_methods[] = {
    {"int_to_boolean", int_to_boolean, METH_O,
     "Convert an integer to a boolean."},
@@ -642,53 +434,23 @@ static PyMethodDef module_methods[] = {
    {NULL, NULL, 0, NULL}  /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
};

#define INITERROR return NULL

PyMODINIT_FUNC
PyInit_cprocessors(void)

#else

#define INITERROR return

PyMODINIT_FUNC
initcprocessors(void)

#endif

{
    PyObject *m;

    UnicodeResultProcessorType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&UnicodeResultProcessorType) < 0)
        INITERROR;
        return;

    DecimalResultProcessorType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&DecimalResultProcessorType) < 0)
        INITERROR;
        return;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif
    m = Py_InitModule3("cprocessors", module_methods,
                       "Module containing C versions of data processing functions.");
    if (m == NULL)
        INITERROR;
        return;

    PyDateTime_IMPORT;

@@ -699,8 +461,5 @@ initcprocessors(void)
    Py_INCREF(&DecimalResultProcessorType);
    PyModule_AddObject(m, "DecimalResultProcessor",
                       (PyObject *)&DecimalResultProcessorType);

#if PY_MAJOR_VERSION >= 3
    return m;
#endif
}
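Either init path leaves the module importable as sqlalchemy.cprocessors. Consumers typically guard the import and fall back to the pure-Python implementations when the extension was not compiled; a sketch of that pattern (the fallback import shown is illustrative, not a verbatim quote of the library):

    try:
        from sqlalchemy.cprocessors import str_to_datetime, to_float
    except ImportError:
        # C extension unavailable on this interpreter; use the slower equivalents
        from sqlalchemy.processors import str_to_datetime, to_float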
@@ -1,7 +1,7 @@
/*
resultproxy.c
Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com

This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,9 +9,6 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php

#include <Python.h>

#define MODULE_NAME "cresultproxy"
#define MODULE_DOC "Module containing C versions of core ResultProxy classes."

#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
@@ -153,11 +150,7 @@ BaseRowProxy_dealloc(BaseRowProxy *self)
    Py_XDECREF(self->row);
    Py_XDECREF(self->processors);
    Py_XDECREF(self->keymap);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject *)self);
#else
    self->ob_type->tp_free((PyObject *)self);
#endif
}

static PyObject *
@@ -252,21 +245,14 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
    PyObject *processor, *value, *processed_value;
    PyObject *row, *record, *result, *indexobject;
    PyObject *exc_module, *exception, *cstr_obj;
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
#endif
    char *cstr_key;
    long index;
    int key_fallback = 0;
    int tuple_check = 0;

#if PY_MAJOR_VERSION < 3
    if (PyInt_CheckExact(key)) {
        index = PyInt_AS_LONG(key);
    }
#endif

    if (PyLong_CheckExact(key)) {
    } else if (PyLong_CheckExact(key)) {
        index = PyLong_AsLong(key);
        if ((index == -1) && PyErr_Occurred())
            /* -1 can be either the actual value, or an error flag. */
@@ -319,21 +305,7 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
        cstr_obj = PyObject_Str(key);
        if (cstr_obj == NULL)
            return NULL;

        /*
        FIXME: raise encoding error exception (in both versions below)
        if the key contains non-ascii chars, instead of an
        InvalidRequestError without any message like in the
        python version.
        */
#if PY_MAJOR_VERSION >= 3
        bytes = PyUnicode_AsASCIIString(cstr_obj);
        if (bytes == NULL)
            return NULL;
        cstr_key = PyBytes_AS_STRING(bytes);
#else
        cstr_key = PyString_AsString(cstr_obj);
#endif
        if (cstr_key == NULL) {
            Py_DECREF(cstr_obj);
            return NULL;
@@ -346,11 +318,7 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
            return NULL;
        }

#if PY_MAJOR_VERSION >= 3
        index = PyLong_AsLong(indexobject);
#else
        index = PyInt_AsLong(indexobject);
#endif
        if ((index == -1) && PyErr_Occurred())
            /* -1 can be either the actual value, or an error flag. */
            return NULL;
@@ -389,23 +357,13 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
static PyObject *
BaseRowProxy_getitem(PyObject *self, Py_ssize_t i)
{
    PyObject *index;

#if PY_MAJOR_VERSION >= 3
    index = PyLong_FromSsize_t(i);
#else
    index = PyInt_FromSsize_t(i);
#endif
    return BaseRowProxy_subscript((BaseRowProxy*)self, index);
    return BaseRowProxy_subscript((BaseRowProxy*)self, PyInt_FromSsize_t(i));
}

static PyObject *
BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
{
    PyObject *tmp;
#if PY_MAJOR_VERSION >= 3
    PyObject *err_bytes;
#endif

    if (!(tmp = PyObject_GenericGetAttr((PyObject *)self, name))) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
@@ -417,23 +375,11 @@ BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)

    tmp = BaseRowProxy_subscript(self, name);
    if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) {

#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(name);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
            PyExc_AttributeError,
            "Could not locate column in row for column '%.200s'",
            PyBytes_AS_STRING(err_bytes)
        );
#else
        PyErr_Format(
            PyExc_AttributeError,
            "Could not locate column in row for column '%.200s'",
            PyString_AsString(name)
        );
#endif
        return NULL;
    }
    return tmp;
@@ -619,7 +565,8 @@ static PyMappingMethods BaseRowProxy_as_mapping = {
};

static PyTypeObject BaseRowProxyType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    PyObject_HEAD_INIT(NULL)
    0,                                          /* ob_size */
    "sqlalchemy.cresultproxy.BaseRowProxy",     /* tp_name */
    sizeof(BaseRowProxy),                       /* tp_basicsize */
    0,                                          /* tp_itemsize */
@@ -659,60 +606,34 @@ static PyTypeObject BaseRowProxyType = {
    0                                           /* tp_new */
};

static PyMethodDef module_methods[] = {
    {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
     "reconstruct a RowProxy instance from its pickled form."},
    {NULL, NULL, 0, NULL}  /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
static PyMethodDef module_methods[] = {
    {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
     "reconstruct a RowProxy instance from its pickled form."},
    {NULL, NULL, 0, NULL}  /* Sentinel */
};

#define INITERROR return NULL

PyMODINIT_FUNC
PyInit_cresultproxy(void)

#else

#define INITERROR return

PyMODINIT_FUNC
initcresultproxy(void)

#endif

{
    PyObject *m;

    BaseRowProxyType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&BaseRowProxyType) < 0)
        INITERROR;
        return;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif
    m = Py_InitModule3("cresultproxy", module_methods,
                       "Module containing C versions of core ResultProxy classes.");
    if (m == NULL)
        INITERROR;
        return;

    Py_INCREF(&BaseRowProxyType);
    PyModule_AddObject(m, "BaseRowProxy", (PyObject *)&BaseRowProxyType);

#if PY_MAJOR_VERSION >= 3
    return m;
#endif
}
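Taken together, BaseRowProxy_subscript, _getitem and _getattro implement the three interchangeable access styles of a result row; the attribute path delegates to the subscript path, which is why a missing key surfaces as the AttributeError formatted above. At the Python level (table and column names hypothetical):

    row = connection.execute(users.select()).first()
    row[0]            # positional   -> BaseRowProxy_getitem
    row['user_name']  # by key       -> BaseRowProxy_subscript
    row.user_name     # by attribute -> BaseRowProxy_getattro, falls back to subscript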
@@ -8,9 +8,6 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php

#include <Python.h>

#define MODULE_NAME "cutils"
#define MODULE_DOC "Module containing C versions of utility functions."

/*
Given arguments from the calling form *multiparams, **params,
return a list of bind parameter structures, usually a list of
@@ -175,51 +172,26 @@ distill_params(PyObject *self, PyObject *args)
        }
    }

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


static PyMethodDef module_methods[] = {
    {"_distill_params", distill_params, METH_VARARGS,
     "Distill an execute() parameter structure."},
    {NULL, NULL, 0, NULL}  /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif

#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
};
#endif


#if PY_MAJOR_VERSION >= 3
PyMODINIT_FUNC
PyInit_cutils(void)
#else
PyMODINIT_FUNC
initcutils(void)
#endif
{
    PyObject *m;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif
    m = Py_InitModule3("cutils", module_methods,
                       "Internal utility functions.");
    if (m == NULL)
        return;

#if PY_MAJOR_VERSION >= 3
    if (m == NULL)
        return NULL;
    return m;
#else
    if (m == NULL)
        return;
#endif
}
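Behaviorally, _distill_params normalizes the several calling conventions of execute() into one list-of-parameter-sets shape before execution; roughly (a sketch of the outcomes, not the C logic):

    conn.execute(stmt, {"x": 1})              # -> [{"x": 1}]
    conn.execute(stmt, [{"x": 1}, {"x": 2}])  # -> [{"x": 1}, {"x": 2}]  (executemany)
    conn.execute(stmt, x=1)                   # -> [{"x": 1}]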
@@ -82,7 +82,7 @@ class MxODBCConnector(Connector):
                              category=errorclass,
                              stacklevel=2)
            else:
                raise errorclass(errorvalue)
                raise errorclass, errorvalue
        return error_handler

    def create_connect_args(self, url):
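As with the except-clause change earlier in this commit, "raise errorclass, errorvalue" is the Python 2-only statement form of raise; the two spellings in the hunk are equivalent where errorclass is an exception class:

    raise errorclass, errorvalue    # Python 2 only; SyntaxError on Python 3
    raise errorclass(errorvalue)    # same effect, valid on Python 2 and 3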
@@ -1,9 +1,3 @@
# connectors/mysqldb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Define behaviors common to MySQLdb dialects.

Currently includes MySQL and Drizzle.
@@ -62,7 +56,6 @@ class MySQLDBConnector(Connector):
        # is overridden when pymysql is used
        return __import__('MySQLdb')


    def do_executemany(self, cursor, statement, parameters, context=None):
        rowcount = cursor.executemany(statement, parameters)
        if context is not None:
@@ -5,23 +5,20 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import Connector
from .. import util

from ..util import asbool

import sys
import re
import urllib


class PyODBCConnector(Connector):
    driver = 'pyodbc'

    supports_sane_multi_rowcount = False

    if util.py2k:
        # PyODBC unicode is broken on UCS-4 builds
        supports_unicode = sys.maxunicode == 65535
        supports_unicode_statements = supports_unicode

    # PyODBC unicode is broken on UCS-4 builds
    supports_unicode = sys.maxunicode == 65535
    supports_unicode_statements = supports_unicode
    supports_native_decimal = True
    default_paramstyle = 'named'

@@ -59,10 +56,10 @@ class PyODBCConnector(Connector):
        connect_args = {}
        for param in ('ansi', 'unicode_results', 'autocommit'):
            if param in keys:
                connect_args[param] = util.asbool(keys.pop(param))
                connect_args[param] = asbool(keys.pop(param))

        if 'odbc_connect' in keys:
            connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
            connectors = [urllib.unquote_plus(keys.pop('odbc_connect'))]
        else:
            dsn_connection = 'dsn' in keys or \
                ('host' in keys and 'database' not in keys)
@@ -94,7 +91,7 @@ class PyODBCConnector(Connector):
                connectors.append("AutoTranslate=%s" %
                                  keys.pop("odbc_autotranslate"))

            connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
            connectors.extend(['%s=%s' % (k, v) for k, v in keys.iteritems()])
        return [[";".join(connectors)], connect_args]

    def is_disconnect(self, e, connection, cursor):
@@ -124,19 +121,18 @@ class PyODBCConnector(Connector):
            self.freetds_driver_version = dbapi_con.getinfo(
                pyodbc.SQL_DRIVER_VER)

        # the "Py2K only" part here is theoretical.
        # have not tried pyodbc + python3.1 yet.
        # Py2K
        self.supports_unicode_statements = (
            not util.py2k or
            (not self.freetds and not self.easysoft)
        )

            not self.freetds and not self.easysoft)
        if self._user_supports_unicode_binds is not None:
            self.supports_unicode_binds = self._user_supports_unicode_binds
        elif util.py2k:
        else:
            self.supports_unicode_binds = (
                not self.freetds or self.freetds_driver_version >= '0.91'
            ) and not self.easysoft
        else:
            self.supports_unicode_binds = True
        # end Py2K

        # run other initialization which asks for user name, etc.
        super(PyODBCConnector, self).initialize(connection)
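For reference, the fragment list assembled in create_connect_args is joined into a single semicolon-separated ODBC connection string, returned alongside the keyword arguments for the DBAPI connect call. A sketch with hypothetical DSN and credentials:

    # URL: mssql+pyodbc://user:pw@mydsn
    connectors = ['dsn=mydsn', 'UID=user', 'PWD=pw']
    args = [';'.join(connectors)]      # -> 'dsn=mydsn;UID=user;PWD=pw'
    # [[...], connect_args] ultimately feeds pyodbc.connect(*args, **connect_args)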
@@ -15,6 +15,7 @@ from ..dialects.mysql import base as mysql
from ..dialects.drizzle import base as drizzle
from ..dialects.oracle import base as oracle
from ..dialects.firebird import base as firebird
from ..dialects.informix import base as informix
from ..dialects.mssql import base as mssql
from ..dialects.sybase import base as sybase

@@ -22,6 +23,7 @@ from ..dialects.sybase import base as sybase
__all__ = (
    'drizzle',
    'firebird',
    'informix',
    'mssql',
    'mysql',
    'postgresql',
@@ -7,6 +7,7 @@
__all__ = (
    'drizzle',
    'firebird',
#    'informix',
    'mssql',
    'mysql',
    'oracle',
@@ -417,7 +417,6 @@ class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
    pass


@log.class_logger
class DrizzleDialect(mysql_dialect.MySQLDialect):
    """Details of the Drizzle dialect.

@@ -496,3 +495,4 @@ class DrizzleDialect(mysql_dialect.MySQLDialect):
        self._backslash_escapes = False


log.class_logger(DrizzleDialect)
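The two spellings in this hunk do the same work: a class decorator is just a function applied to the class after its body executes, so 0.8's trailing log.class_logger(DrizzleDialect) call matches 0.9's @log.class_logger decoration (modulo rebinding the name to the decorator's return value). In general:

    @decorator
    class C(object):
        pass

    # is equivalent to:
    class C(object):
        pass
    C = decorator(C)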
@@ -6,7 +6,7 @@

from sqlalchemy.dialects.firebird import base, kinterbasdb, fdb

base.dialect = fdb.dialect
base.dialect = kinterbasdb.dialect

from sqlalchemy.dialects.firebird.base import \
    SMALLINT, BIGINT, FLOAT, FLOAT, DATE, TIME, \
@@ -402,8 +402,6 @@ class FBDialect(default.DefaultDialect):
    colspecs = colspecs
    ischema_names = ischema_names

    construct_arguments = []

    # defaults to dialect ver. 3,
    # will be autodetected off upon
    # first connect
@@ -477,34 +475,18 @@ class FBDialect(default.DefaultDialect):

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        # there are two queries commonly mentioned for this.
        # this one, using view_blr, is at the Firebird FAQ among other places:
        # http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        SELECT DISTINCT rdb$relation_name
        FROM rdb$relation_fields
        WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
        """

        # the other query is this one.  It's not clear if there's really
        # any difference between these two.  This link:
        # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
        # states them as interchangeable.  Some discussion at [ticket:2898]
        # SELECT DISTINCT rdb$relation_name
        # FROM rdb$relation_fields
        # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL

        return [self.normalize_name(row[0]) for row in connection.execute(s)]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        # see http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is not null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        SELECT distinct rdb$view_name
        FROM rdb$view_relations
        """
        return [self.normalize_name(row[0]) for row in connection.execute(s)]

@@ -703,7 +685,7 @@ class FBDialect(default.DefaultDialect):
                    self.normalize_name(row['fname']))
                fk['referred_columns'].append(
                    self.normalize_name(row['targetfname']))
        return list(fks.values())
        return fks.values()

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
@@ -734,5 +716,5 @@ class FBDialect(default.DefaultDialect):
            indexrec['column_names'].append(
                self.normalize_name(row['field_name']))

        return list(indexes.values())
        return indexes.values()
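The list(fks.values()) -> fks.values() changes in the last two hunks track the Python version split: on Python 2, dict.values() already returns a list, while on Python 3 it returns a view, which the 0.9 cross-compatible code materializes explicitly:

    d = {'a': 1}
    d.values()          # Python 2: [1] (a list); Python 3: dict_values([1]) (a view)
    list(d.values())    # a real list on both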
@@ -15,9 +15,10 @@

.. versionadded:: 0.8 - Support for the fdb Firebird driver.

.. versionchanged:: 0.9 - The fdb dialect is now the default dialect
   under the ``firebird://`` URL space, as ``fdb`` is now the official
   Python driver for Firebird.
Status
------

The fdb dialect is new and not yet tested (can't get fdb to build).

Arguments
----------
@@ -44,7 +45,7 @@ dialect, however does not accept every argument that Kinterbasdb does.
        r = conn.execute(stmt)
        print r.rowcount

* ``retaining`` - False by default.  Setting this to True will pass the
* ``retaining`` - True by default.  Leaving this on True will pass the
  ``retaining=True`` keyword argument to the ``.commit()`` and ``.rollback()``
  methods of the DBAPI connection, which can improve performance in some
  situations, but apparently with significant caveats.
@@ -52,11 +53,8 @@ dialect, however does not accept every argument that Kinterbasdb does.
  understand the implications of this flag.

  .. versionadded:: 0.8.2 - ``retaining`` keyword argument specifying
     transaction retaining behavior - in 0.8 it defaults to ``True``
     for backwards compatibility.

  .. versionchanged:: 0.9.0 - the ``retaining`` flag defaults to ``False``.
     In 0.8 it defaulted to ``True``.
     transaction retaining behavior. This flag will **default to False**
     in 0.9.

.. seealso::

@@ -72,7 +70,7 @@ from ... import util
class FBDialect_fdb(FBDialect_kinterbasdb):

    def __init__(self, enable_rowcount=True,
                 retaining=False, **kwargs):
                 retaining=True, **kwargs):
        super(FBDialect_fdb, self).__init__(
            enable_rowcount=enable_rowcount,
            retaining=retaining, **kwargs)
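Per the restored docstring, retaining now defaults to True for the fdb (and, below, kinterbasdb) dialects. Callers who want the more conservative behavior described in the caveats can switch it off at engine creation; dialect-level keyword arguments pass straight through create_engine() (URL and credentials hypothetical):

    from sqlalchemy import create_engine

    engine = create_engine('firebird+fdb://user:pw@localhost/db.fdb',
                           retaining=False)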
@@ -24,7 +24,7 @@ also accepts the following:

* ``concurrency_level`` - set the backend policy with regards to threading
  issues: by default SQLAlchemy uses policy 1.  See the linked documents
  below for further information.
  below for futher information.

.. seealso::

@@ -87,7 +87,7 @@ class FBDialect_kinterbasdb(FBDialect):

    def __init__(self, type_conv=200, concurrency_level=1,
                 enable_rowcount=True,
                 retaining=False, **kwargs):
                 retaining=True, **kwargs):
        super(FBDialect_kinterbasdb, self).__init__(**kwargs)
        self.enable_rowcount = enable_rowcount
        self.type_conv = type_conv
@@ -1,10 +1,9 @@
# event/__init__.py
# informix/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains
from .base import Events, dispatcher
from .attr import RefCollection
from .legacy import _legacy_signature
from sqlalchemy.dialects.informix import base, informixdb

base.dialect = informixdb.dialect
lib/sqlalchemy/dialects/informix/base.py (new file, 589 lines)
@@ -0,0 +1,589 @@
# informix/base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# coding: gbk
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: informix
    :name: Informix

.. note::

    The Informix dialect functions on current SQLAlchemy versions
    but is not regularly tested, and may have many issues and
    caveats not currently handled.

"""


import datetime

from sqlalchemy import sql, schema, exc, pool, util
from sqlalchemy.sql import compiler, text
from sqlalchemy.engine import default, reflection
from sqlalchemy import types as sqltypes

RESERVED_WORDS = set(
    ["abs", "absolute", "access", "access_method", "acos", "active", "add",
    "address", "add_months", "admin", "after", "aggregate", "alignment",
    "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append",
    "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
    "attributes", "audit", "authentication", "authid", "authorization",
    "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
    "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
    "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
    "avoid_star_join", "avoid_subqf", "based", "before", "begin",
    "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
    "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
    "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
    "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
    "character", "character_length", "char_length", "check", "class",
    "class_origin", "client", "clob", "clobdir", "close", "cluster",
    "clustersize", "cobol", "codeset", "collation", "collection",
    "column", "columns", "commit", "committed", "commutator", "component",
    "components", "concat", "concurrent", "connect", "connection",
    "connection_name", "connect_by_iscycle", "connect_by_isleaf",
    "connect_by_rootconst", "constraint", "constraints", "constructor",
    "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
    "create", "cross", "current", "current_role", "currval", "cursor",
    "cycle", "database", "datafiles", "dataskip", "date", "datetime",
    "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
    "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
    "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
    "dec_t", "default", "default_role", "deferred", "deferred_prepare",
    "define", "delay", "delete", "deleting", "delimited", "delimiter",
    "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
    "directives", "dirty", "disable", "disabled", "disconnect", "disk",
    "distinct", "distributebinary", "distributesreferences",
    "distributions", "document", "domain", "donotdistribute", "dormant",
    "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
    "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
    "environment", "error", "escape", "exception", "exclusive", "exec",
    "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
    "explain", "explicit", "express", "expression", "extdirectives",
    "extend", "extent", "external", "fact", "false", "far", "fetch",
    "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
    "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
    "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
    "format_units", "fortran", "found", "fraction", "fragment",
    "fragments", "free", "from", "full", "function", "general", "get",
    "gethint", "global", "go", "goto", "grant", "greaterthan",
    "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
    "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
    "idslbacreadset", "idslbacreadtree", "idslbacrules",
    "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
    "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
    "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
    "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
    "indexes", "index_all", "index_sj", "indicator", "informix", "init",
    "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
    "int", "int8", "integ", "integer", "internal", "internallength",
    "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
    "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
    "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
    "labeltostring", "language", "last", "last_day", "leading", "left",
    "length", "lessthan", "lessthanorequal", "let", "level", "like",
    "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
    "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
    "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
    "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
    "memory", "memory_resident", "merge", "message_length", "message_text",
    "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
    "modify", "module", "money", "month", "months_between", "mounting",
    "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
    "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
    "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
    "normal", "not", "notemplatearg", "notequal", "null", "nullif",
    "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
    "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
    "opaque", "opclass", "open", "optcompind", "optical", "optimization",
    "option", "or", "order", "ordered", "out", "outer", "output",
    "override", "page", "parallelizable", "parameter", "partition",
    "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
    "pipe", "pli", "pload", "policy", "pow", "power", "precision",
    "prepare", "previous", "primary", "prior", "private", "privileges",
    "procedure", "properties", "public", "put", "raise", "range", "raw",
    "read", "real", "recordend", "references", "referencing", "register",
    "rejectfile", "relative", "release", "remainder", "rename",
    "reoptimization", "repeatable", "replace", "replication", "reserve",
    "resolution", "resource", "restart", "restrict", "resume", "retain",
    "retainupdatelocks", "return", "returned_sqlstate", "returning",
    "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
    "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
    "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
    "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
    "seclabel_by_name", "seclabel_to_char", "second", "secondary",
    "section", "secured", "security", "selconst", "select", "selecting",
    "selfunc", "selfuncargs", "sequence", "serial", "serial8",
    "serializable", "serveruuid", "server_name", "session", "set",
    "setsessionauth", "share", "short", "siblings", "signed", "sin",
    "sitename", "size", "skall", "skinhibit", "skip", "skshow",
    "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
    "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
    "stability", "stack", "standard", "start", "star_join", "statchange",
    "statement", "static", "statistics", "statlevel", "status", "stdev",
    "step", "stop", "storage", "store", "strategies", "string",
    "stringtolabel", "struct", "style", "subclass_origin", "substr",
    "substring", "sum", "support", "sync", "synonym", "sysdate",
    "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
    "tables", "tan", "task", "temp", "template", "test", "text", "then",
    "time", "timeout", "to", "today", "to_char", "to_date",
    "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
    "transaction", "transition", "tree", "trigger", "triggers", "trim",
    "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
    "typename", "typeof", "uid", "uncommitted", "under", "union",
    "unique", "units", "unknown", "unload", "unlock", "unsigned",
    "update", "updating", "upon", "upper", "usage", "use",
    "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
    "using", "value", "values", "var", "varchar", "variable", "variance",
    "variant", "varying", "vercols", "view", "violations", "void",
    "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
    "while", "with", "without", "work", "write", "writedown", "writeup",
    "xadatasource", "xid", "xload", "xunload", "year"
    ])


class InfoDateTime(sqltypes.DateTime):

    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                if value.microsecond:
                    value = value.replace(microsecond=0)
            return value
        return process


class InfoTime(sqltypes.Time):

    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                if value.microsecond:
                    value = value.replace(microsecond=0)
            return value
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.time()
            else:
                return value
        return process

colspecs = {
    sqltypes.DateTime: InfoDateTime,
    sqltypes.TIMESTAMP: InfoDateTime,
    sqltypes.Time: InfoTime,
}


ischema_names = {
    0: sqltypes.CHAR,           # CHAR
    1: sqltypes.SMALLINT,       # SMALLINT
    2: sqltypes.INTEGER,        # INT
    3: sqltypes.FLOAT,          # Float
    3: sqltypes.Float,          # SmallFloat
    5: sqltypes.DECIMAL,        # DECIMAL
    6: sqltypes.Integer,        # Serial
    7: sqltypes.DATE,           # DATE
    8: sqltypes.Numeric,        # MONEY
    10: sqltypes.DATETIME,      # DATETIME
    11: sqltypes.LargeBinary,   # BYTE
    12: sqltypes.TEXT,          # TEXT
    13: sqltypes.VARCHAR,       # VARCHAR
    15: sqltypes.NCHAR,         # NCHAR
    16: sqltypes.NVARCHAR,      # NVARCHAR
    17: sqltypes.Integer,       # INT8
    18: sqltypes.Integer,       # Serial8
    43: sqltypes.String,        # LVARCHAR
    -1: sqltypes.BLOB,          # BLOB
    -1: sqltypes.CLOB,          # CLOB
}
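# A side note on the mapping above: a Python dict literal with a repeated key
# keeps only the last binding, so once this module is imported 3 resolves to
# sqltypes.Float and -1 resolves to sqltypes.CLOB:
#
#     {3: 'FLOAT', 3: 'SmallFloat'}   # evaluates to {3: 'SmallFloat'}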
class InfoTypeCompiler(compiler.GenericTypeCompiler):
|
||||
def visit_DATETIME(self, type_):
|
||||
return "DATETIME YEAR TO SECOND"
|
||||
|
||||
def visit_TIME(self, type_):
|
||||
return "DATETIME HOUR TO SECOND"
|
||||
|
||||
def visit_TIMESTAMP(self, type_):
|
||||
return "DATETIME YEAR TO SECOND"
|
||||
|
||||
def visit_large_binary(self, type_):
|
||||
return "BYTE"
|
||||
|
||||
def visit_boolean(self, type_):
|
||||
return "SMALLINT"
|
||||
|
||||
|
||||
class InfoSQLCompiler(compiler.SQLCompiler):
|
||||
|
||||
def default_from(self):
|
||||
return " from systables where tabname = 'systables' "
|
||||
|
||||
def get_select_precolumns(self, select):
|
||||
s = ""
|
||||
if select._offset:
|
||||
s += "SKIP %s " % select._offset
|
||||
if select._limit:
|
||||
s += "FIRST %s " % select._limit
|
||||
s += select._distinct and "DISTINCT " or ""
|
||||
return s
|
||||
|
||||
def visit_select(self, select, asfrom=False, parens=True, **kw):
|
||||
text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
|
||||
if asfrom and parens and self.dialect.server_version_info < (11,):
|
||||
#assuming that 11 version doesn't need this, not tested
|
||||
return "table(multiset" + text + ")"
|
||||
else:
|
||||
return text
|
||||
|
||||
def limit_clause(self, select):
|
||||
return ""
|
||||
|
||||
def visit_function(self, func, **kw):
|
||||
if func.name.lower() == 'current_date':
|
||||
return "today"
|
||||
elif func.name.lower() == 'current_time':
|
||||
return "CURRENT HOUR TO SECOND"
|
||||
elif func.name.lower() in ('current_timestamp', 'now'):
|
||||
return "CURRENT YEAR TO SECOND"
|
||||
else:
|
||||
return compiler.SQLCompiler.visit_function(self, func, **kw)
|
||||
|
||||
def visit_mod_binary(self, binary, operator, **kw):
|
||||
return "MOD(%s, %s)" % (self.process(binary.left, **kw),
|
||||
self.process(binary.right, **kw))
|
||||
|
||||
|
||||
class InfoDDLCompiler(compiler.DDLCompiler):
|
||||
|
||||
def visit_add_constraint(self, create):
|
||||
preparer = self.preparer
|
||||
return "ALTER TABLE %s ADD CONSTRAINT %s" % (
|
||||
self.preparer.format_table(create.element.table),
|
||||
self.process(create.element)
|
||||
)

    def get_column_specification(self, column, **kw):
        colspec = self.preparer.format_column(column)
        first = None
        if column.primary_key and column.autoincrement:
            try:
                first = [c for c in column.table.primary_key.columns
                         if (c.autoincrement and
                             isinstance(c.type, sqltypes.Integer) and
                             not c.foreign_keys)].pop(0)
            except IndexError:
                pass

        if column is first:
            colspec += " SERIAL"
        else:
            colspec += " " + self.dialect.type_compiler.process(column.type)
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        if not column.nullable:
            colspec += " NOT NULL"

        return colspec

    def get_column_default_string(self, column):
        if (isinstance(column.server_default, schema.DefaultClause) and
                isinstance(column.server_default.arg, basestring)):
            if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
                return self.sql_compiler.process(text(column.server_default.arg))

        return super(InfoDDLCompiler, self).get_column_default_string(column)

    ### Informix wants the constraint name at the end, hence this is c&p from sql/compiler.py
    def visit_primary_key_constraint(self, constraint):
        if len(constraint) == 0:
            return ''
        text = "PRIMARY KEY "
        text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
                                   for c in constraint)
        text += self.define_constraint_deferrability(constraint)

        if constraint.name is not None:
            text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
        return text

    def visit_foreign_key_constraint(self, constraint):
        preparer = self.dialect.identifier_preparer
        remote_table = list(constraint._elements.values())[0].column.table
        text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
            ', '.join(preparer.quote(f.parent.name, f.parent.quote)
                      for f in constraint._elements.values()),
            preparer.format_table(remote_table),
            ', '.join(preparer.quote(f.column.name, f.column.quote)
                      for f in constraint._elements.values())
        )
        text += self.define_constraint_cascades(constraint)
        text += self.define_constraint_deferrability(constraint)

        if constraint.name is not None:
            text += " CONSTRAINT %s " % \
                preparer.format_constraint(constraint)
        return text

    def visit_unique_constraint(self, constraint):
        text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
        text += self.define_constraint_deferrability(constraint)

        if constraint.name is not None:
            text += " CONSTRAINT %s " % self.preparer.format_constraint(constraint)
        return text


class InformixIdentifierPreparer(compiler.IdentifierPreparer):

    reserved_words = RESERVED_WORDS


class InformixDialect(default.DefaultDialect):
    name = 'informix'

    max_identifier_length = 128  # adjusts at runtime based on server version

    type_compiler = InfoTypeCompiler
    statement_compiler = InfoSQLCompiler
    ddl_compiler = InfoDDLCompiler
    colspecs = colspecs
    ischema_names = ischema_names
    preparer = InformixIdentifierPreparer
    default_paramstyle = 'qmark'

    def initialize(self, connection):
        super(InformixDialect, self).initialize(connection)

        # http://www.querix.com/support/knowledge-base/error_number_message/error_200
        if self.server_version_info < (9, 2):
            self.max_identifier_length = 18
        else:
            self.max_identifier_length = 128

    def _get_table_names(self, connection, schema, type, **kw):
        schema = schema or self.default_schema_name
        s = "select tabname, owner from systables where owner=? and tabtype=?"
        return [row[0] for row in connection.execute(s, schema, type)]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        return self._get_table_names(connection, schema, 'T', **kw)

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        return self._get_table_names(connection, schema, 'V', **kw)

    @reflection.cache
    def get_schema_names(self, connection, **kw):
        s = "select owner from systables"
        return [row[0] for row in connection.execute(s)]

    def has_table(self, connection, table_name, schema=None):
        schema = schema or self.default_schema_name
        cursor = connection.execute(
            """select tabname from systables where tabname=? and owner=?""",
            table_name, schema)
        return cursor.first() is not None

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        schema = schema or self.default_schema_name
        c = connection.execute(
            """select colname, coltype, collength, t3.default, t1.colno from
                syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
                where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
                  and t3.tabid = t2.tabid and t3.colno = t1.colno
                order by t1.colno""", table_name, schema)

        pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw)
        primary_cols = pk_constraint['constrained_columns']

        columns = []
        rows = c.fetchall()
        for name, colattr, collength, default, colno in rows:
            name = name.lower()

            autoincrement = False
            primary_key = False

            if name in primary_cols:
                primary_key = True

            # in 7.31, coltype = 0x000
            #                      ^^-- column type
            #                     ^-- 1 not null, 0 null
            not_nullable, coltype = divmod(colattr, 256)
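            # worked example (illustrative, not in the original source):
            # colattr 262 == 0x106, so divmod(262, 256) == (1, 6),
            # i.e. a NOT NULL column of type 6 (SERIAL)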
            if coltype not in (0, 13) and default:
                default = default.split()[-1]

            if coltype == 6:  # Serial, mark as autoincrement
                autoincrement = True

            if coltype == 0 or coltype == 13:  # char, varchar
                coltype = ischema_names[coltype](collength)
                if default:
                    default = "'%s'" % default
            elif coltype == 5:  # decimal
                precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
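                # worked example (illustrative): collength 0x0A02 gives
                # precision (0x0A02 & 0xFF00) >> 8 == 10 and scale
                # 0x0A02 & 0xFF == 2, i.e. NUMERIC(10, 2); a scale byte of
                # 255 means "no scale was specified"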
                if scale == 255:
                    scale = 0
                coltype = sqltypes.Numeric(precision, scale)
            else:
                try:
                    coltype = ischema_names[coltype]
                except KeyError:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (coltype, name))
                    coltype = sqltypes.NULLTYPE

            column_info = dict(name=name, type=coltype, nullable=not not_nullable,
                               default=default, autoincrement=autoincrement,
                               primary_key=primary_key)
            columns.append(column_info)
        return columns

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_sel = schema or self.default_schema_name
        c = connection.execute(
            """select t1.constrname as cons_name,
                 t4.colname as local_column, t7.tabname as remote_table,
                 t6.colname as remote_column, t7.owner as remote_owner
               from sysconstraints as t1 , systables as t2 ,
                 sysindexes as t3 , syscolumns as t4 ,
                 sysreferences as t5 , syscolumns as t6 , systables as t7 ,
                 sysconstraints as t8 , sysindexes as t9
               where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
                 and t3.tabid = t2.tabid and t3.idxname = t1.idxname
                 and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
                   t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
                   t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
                 and t5.constrid = t1.constrid and t8.constrid = t5.primary
                 and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
                   t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
                   t9.part11, t9.part12, t9.part13, t9.part14, t9.part15, t9.part16) and t9.idxname =
                 t8.idxname
                 and t7.tabid = t5.ptabid""", table_name, schema_sel)

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        rows = c.fetchall()
        for cons_name, local_column, \
                remote_table, remote_column, remote_owner in rows:

            rec = fkeys[cons_name]
            rec['name'] = cons_name
            local_cols, remote_cols = \
                rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = remote_table
                if schema is not None:
                    rec['referred_schema'] = remote_owner

            if local_column not in local_cols:
                local_cols.append(local_column)
            if remote_column not in remote_cols:
                remote_cols.append(remote_column)

        return fkeys.values()

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        schema = schema or self.default_schema_name

        # Select the column positions from sysindexes for sysconstraints
        data = connection.execute(
            """select t2.*
               from systables as t1, sysindexes as t2, sysconstraints as t3
               where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
                 and t2.idxname=t3.idxname and t3.constrtype='P'""",
            table_name, schema
        ).fetchall()

        colpositions = set()

        for row in data:
            colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
            colpositions |= colpos

        if not len(colpositions):
            return {'constrained_columns': [], 'name': None}

        # Select the column names using the columnpositions
        # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
        place_holder = ','.join('?' * len(colpositions))
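        # illustrative example: three column positions yield
        # ','.join('?' * 3) == '?,?,?', so the IN clause always has one
        # bind placeholder per entry in colpositions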
        c = connection.execute(
            """select t1.colname
               from syscolumns as t1, systables as t2
               where t2.tabname=? and t1.tabid = t2.tabid and
                 t1.colno in (%s)""" % place_holder,
            table_name, *colpositions
        ).fetchall()

        cols = reduce(lambda x, y: list(x) + list(y), c, [])
        return {'constrained_columns': cols, 'name': None}

    @reflection.cache
    def get_indexes(self, connection, table_name, schema, **kw):
        # TODO: schema...
        c = connection.execute(
            """select t1.*
               from sysindexes as t1 , systables as t2
               where t1.tabid = t2.tabid and t2.tabname=?""",
            table_name)

        indexes = []
        for row in c.fetchall():
            colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
            colnames = [x for x in colnames if x]
            place_holder = ','.join('?' * len(colnames))
            c = connection.execute(
                """select t1.colname
                   from syscolumns as t1, systables as t2
                   where t2.tabname=? and t1.tabid = t2.tabid and
                     t1.colno in (%s)""" % place_holder,
                table_name, *colnames
            ).fetchall()
            c = reduce(lambda x, y: list(x) + list(y), c, [])
            indexes.append({
                'name': row.idxname,
                'unique': row.idxtype.lower() == 'u',
                'column_names': c
            })
        return indexes

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        schema = schema or self.default_schema_name
        c = connection.execute(
            """select t1.viewtext
               from sysviews as t1 , systables as t2
               where t1.tabid=t2.tabid and t2.tabname=?
                 and t2.owner=? order by seqno""",
            view_name, schema).fetchall()

        return ''.join([row[0] for row in c])

    def _get_default_schema_name(self, connection):
        return connection.execute('select CURRENT_ROLE from systables').scalar()
69 lib/sqlalchemy/dialects/informix/informixdb.py Normal file
@@ -0,0 +1,69 @@
# informix/informixdb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: informix+informixdb
    :name: informixdb
    :dbapi: informixdb
    :connectstring: informix+informixdb://user:password@host/dbname
    :url: http://informixdb.sourceforge.net/

"""

import re

from sqlalchemy.dialects.informix.base import InformixDialect
from sqlalchemy.engine import default

VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')


class InformixExecutionContext_informixdb(default.DefaultExecutionContext):

    def post_exec(self):
        if self.isinsert:
            self._lastrowid = self.cursor.sqlerrd[1]

    def get_lastrowid(self):
        return self._lastrowid


class InformixDialect_informixdb(InformixDialect):
    driver = 'informixdb'
    execution_ctx_cls = InformixExecutionContext_informixdb

    @classmethod
    def dbapi(cls):
        return __import__('informixdb')

    def create_connect_args(self, url):
        if url.host:
            dsn = '%s@%s' % (url.database, url.host)
        else:
            dsn = url.database

        if url.username:
            opt = {'user': url.username, 'password': url.password}
        else:
            opt = {}

        return ([dsn], opt)

    def _get_server_version_info(self, connection):
        # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
        v = VERSION_RE.split(connection.connection.dbms_version)
        return (int(v[1]), int(v[2]), v[3])
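        # illustrative only (assumed input): a dbms_version string such as
        # '9.40.UC3' splits into ['', '9', '40', '.UC3', ''], so this
        # returns (9, 40, '.UC3')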

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.OperationalError):
            return 'closed the connection' in str(e) \
                or 'connection not open' in str(e)
        else:
            return False


dialect = InformixDialect_informixdb

@@ -103,48 +103,21 @@ for these types will be issued as DATETIME.

.. _mssql_indexes:

Clustered Index Support
-----------------------

The MSSQL dialect supports clustered indexes (and primary keys) via the
``mssql_clustered`` option.  This option is available to :class:`.Index`,
:class:`.UniqueConstraint` and :class:`.PrimaryKeyConstraint`.

To generate a clustered index::

    Index("my_index", table.c.x, mssql_clustered=True)

which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.

.. versionadded:: 0.8

To generate a clustered primary key use::

    Table('my_table', metadata,
          Column('x', ...),
          Column('y', ...),
          PrimaryKeyConstraint("x", "y", mssql_clustered=True))

which will render the table, for example, as::

    CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y))

Similarly, we can generate a clustered unique constraint using::

    Table('my_table', metadata,
          Column('x', ...),
          Column('y', ...),
          PrimaryKeyConstraint("x"),
          UniqueConstraint("y", mssql_clustered=True),
          )

.. versionadded:: 0.9.2
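
One way to verify the emitted DDL is to compile the construct directly (a
quick sketch, not part of the original documentation; ``my_table`` and an
MSSQL ``engine`` are assumed to already exist)::

    from sqlalchemy.schema import CreateTable
    print(CreateTable(my_table).compile(dialect=engine.dialect))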

MSSQL-Specific Index Options
-----------------------------

In addition to clustering, the MSSQL dialect supports other special options
for :class:`.Index`.
The MSSQL dialect supports special options for :class:`.Index`.

CLUSTERED
^^^^^^^^^^

The ``mssql_clustered`` option adds the CLUSTERED keyword to the index::

    Index("my_index", table.c.x, mssql_clustered=True)

would render the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``

.. versionadded:: 0.8

INCLUDE
^^^^^^^
@@ -322,7 +295,7 @@ class _MSDate(sqltypes.Date):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.date()
            elif isinstance(value, util.string_types):
            elif isinstance(value, basestring):
                return datetime.date(*[
                    int(x or 0)
                    for x in self._reg.match(value).groups()
@@ -355,7 +328,7 @@ class TIME(sqltypes.TIME):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.time()
            elif isinstance(value, util.string_types):
            elif isinstance(value, basestring):
                return datetime.time(*[
                    int(x or 0)
                    for x in self._reg.match(value).groups()])
@@ -1018,7 +991,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
            text += "UNIQUE "

        # handle clustering option
        if index.dialect_options['mssql']['clustered']:
        if index.kwargs.get("mssql_clustered"):
            text += "CLUSTERED "

        text += "INDEX %s ON %s (%s)" \
@@ -1033,13 +1006,13 @@ class MSDDLCompiler(compiler.DDLCompiler):
            )

        # handle other included columns
        if index.dialect_options['mssql']['include']:
        if index.kwargs.get("mssql_include"):
            inclusions = [index.table.c[col]
                          if isinstance(col, util.string_types) else col
                          for col in index.dialect_options['mssql']['include']]
                          if isinstance(col, basestring) else col
                          for col in index.kwargs["mssql_include"]]

            text += " INCLUDE (%s)" \
                % ', '.join([preparer.quote(c.name)
                % ', '.join([preparer.quote(c.name, c.quote)
                             for c in inclusions])

        return text
@@ -1050,40 +1023,6 @@ class MSDDLCompiler(compiler.DDLCompiler):
            self.preparer.format_table(drop.element.table)
        )

    def visit_primary_key_constraint(self, constraint):
        if len(constraint) == 0:
            return ''
        text = ""
        if constraint.name is not None:
            text += "CONSTRAINT %s " % \
                self.preparer.format_constraint(constraint)
        text += "PRIMARY KEY "

        if constraint.dialect_options['mssql']['clustered']:
            text += "CLUSTERED "

        text += "(%s)" % ', '.join(self.preparer.quote(c.name)
                                   for c in constraint)
        text += self.define_constraint_deferrability(constraint)
        return text

    def visit_unique_constraint(self, constraint):
        if len(constraint) == 0:
            return ''
        text = ""
        if constraint.name is not None:
            text += "CONSTRAINT %s " % \
                self.preparer.format_constraint(constraint)
        text += "UNIQUE "

        if constraint.dialect_options['mssql']['clustered']:
            text += "CLUSTERED "

        text += "(%s)" % ', '.join(self.preparer.quote(c.name)
                                   for c in constraint)
        text += self.define_constraint_deferrability(constraint)
        return text

class MSIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words = RESERVED_WORDS

@@ -1094,7 +1033,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
    def _escape_identifier(self, value):
        return value

    def quote_schema(self, schema, force=None):
    def quote_schema(self, schema, force=True):
        """Prepare a quoted table and schema name."""
        result = '.'.join([self.quote(x, force) for x in schema.split('.')])
        return result
@@ -1164,24 +1103,11 @@ class MSDialect(default.DefaultDialect):
    type_compiler = MSTypeCompiler
    preparer = MSIdentifierPreparer

    construct_arguments = [
        (sa_schema.PrimaryKeyConstraint, {
            "clustered": False
        }),
        (sa_schema.UniqueConstraint, {
            "clustered": False
        }),
        (sa_schema.Index, {
            "clustered": False,
            "include": None
        })
    ]

    def __init__(self,
                 query_timeout=None,
                 use_scope_identity=True,
                 max_identifier_length=None,
                 schema_name="dbo", **opts):
                 schema_name=u"dbo", **opts):
        self.query_timeout = int(query_timeout or 0)
        self.schema_name = schema_name

@@ -1201,7 +1127,7 @@ class MSDialect(default.DefaultDialect):

    def initialize(self, connection):
        super(MSDialect, self).initialize(connection)
        if self.server_version_info[0] not in list(range(8, 17)):
        if self.server_version_info[0] not in range(8, 17):
            # FreeTDS with version 4.2 seems to report here
            # a number like "95.10.255".  Don't know what
            # that is.  So emit warning.
@@ -1216,23 +1142,34 @@ class MSDialect(default.DefaultDialect):
            self.implicit_returning = True

    def _get_default_schema_name(self, connection):
        query = sql.text("""
        user_name = connection.scalar("SELECT user_name()")
        if user_name is not None:
            # now, get the default schema
            query = sql.text("""
            SELECT default_schema_name FROM
            sys.database_principals
            WHERE principal_id=database_principal_id()
        """)
        default_schema_name = connection.scalar(query)
        if default_schema_name is not None:
            return util.text_type(default_schema_name)
            WHERE name = :name
            AND type = 'S'
            """)
            try:
                default_schema_name = connection.scalar(query, name=user_name)
                if default_schema_name is not None:
                    return unicode(default_schema_name)
            except:
                pass
        return self.schema_name

    def _unicode_cast(self, column):
        if self.server_version_info >= MS_2005_VERSION:
            return cast(column, NVARCHAR(_warn_on_bytestring=False))
        else:
        return self.schema_name
            return column

    @_db_plus_owner
    def has_table(self, connection, tablename, dbname, owner, schema):
        columns = ischema.columns

        whereclause = columns.c.table_name == tablename

        whereclause = self._unicode_cast(columns.c.table_name) == tablename
        if owner:
            whereclause = sql.and_(whereclause,
                                   columns.c.table_schema == owner)
@@ -1255,7 +1192,7 @@ class MSDialect(default.DefaultDialect):
        s = sql.select([tables.c.table_name],
                       sql.and_(
                           tables.c.table_schema == owner,
                           tables.c.table_type == 'BASE TABLE'
                           tables.c.table_type == u'BASE TABLE'
                       ),
                       order_by=[tables.c.table_name]
                       )
@@ -1269,7 +1206,7 @@ class MSDialect(default.DefaultDialect):
        s = sql.select([tables.c.table_name],
                       sql.and_(
                           tables.c.table_schema == owner,
                           tables.c.table_type == 'VIEW'
                           tables.c.table_type == u'VIEW'
                       ),
                       order_by=[tables.c.table_name]
                       )
@@ -1334,7 +1271,7 @@ class MSDialect(default.DefaultDialect):
            if row['index_id'] in indexes:
                indexes[row['index_id']]['column_names'].append(row['name'])

        return list(indexes.values())
        return indexes.values()

    @reflection.cache
    @_db_plus_owner
@@ -1541,4 +1478,4 @@ class MSDialect(default.DefaultDialect):
            local_cols.append(scol)
            remote_cols.append(rcol)

        return list(fkeys.values())
        return fkeys.values()

@@ -7,7 +7,7 @@
# TODO: should be using the sys. catalog with SQL Server, not information schema

from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
from ...types import String, Unicode, Integer, TypeDecorator
from ... import cast
from ... import util
from ...sql import expression
@@ -19,8 +19,10 @@ class CoerceUnicode(TypeDecorator):
    impl = Unicode

    def process_bind_param(self, value, dialect):
        if util.py2k and isinstance(value, util.binary_type):
        # Py2K
        if isinstance(value, str):
            value = value.decode(dialect.encoding)
        # end Py2K
        return value

    def bind_expression(self, bindvalue):

@@ -16,6 +16,7 @@ pymssql is a Python module that provides a Python DBAPI interface around
Linux, MacOSX and Windows platforms.

"""

from .base import MSDialect
from ... import types as sqltypes, util, processors
import re

@@ -224,7 +224,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
                    # without closing it (FreeTDS particularly)
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error as e:
                except self.dialect.dbapi.Error, e:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()

@@ -288,8 +288,10 @@ Foreign Key Arguments to Avoid

MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY",
or "MATCH".  Using the ``deferrable`` or ``initially`` keyword argument with
:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being
rendered in a DDL expression, which will then raise an error on MySQL.
:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of
these keywords being ignored in a DDL expression along with a warning; however,
this behavior **will change** in a future release.

In order to use these keywords on a foreign key while having them ignored
on a MySQL backend, use a custom compile rule::

@@ -301,20 +303,22 @@ on a MySQL backend, use a custom compile rule::
        element.deferrable = element.initially = None
        return compiler.visit_foreign_key_constraint(element, **kw)

.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores
.. versionchanged:: 0.8.3 - the MySQL backend will emit a warning when the
    the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint`
    and :class:`.ForeignKey`.
    and :class:`.ForeignKey` are used.  The arguments will no longer be
    ignored in 0.9.

The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
The "MATCH" keyword is in fact more insidious, and in a future release will be
explicitly disallowed
by SQLAlchemy in conjunction with the MySQL backend.  This argument is silently
ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options
also being ignored by the backend.  Therefore MATCH should never be used with the
MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation
rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time.

.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError`
    when the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
    or :class:`.ForeignKey`.
.. versionadded:: 0.8.3 - the MySQL backend will emit a warning when
    the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
    or :class:`.ForeignKey`.  This will be a :class:`.CompileError` in 0.9.

Reflection of Foreign Key Constraints
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -412,21 +416,13 @@ SET_RE = re.compile(


class _NumericType(object):
    """Base for MySQL numeric types.

    This is the base both for NUMERIC as well as INTEGER, hence
    it's a mixin.

    """
    """Base for MySQL numeric types."""

    def __init__(self, unsigned=False, zerofill=False, **kw):
        self.unsigned = unsigned
        self.zerofill = zerofill
        super(_NumericType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_NumericType, sqltypes.Numeric])

class _FloatType(_NumericType, sqltypes.Float):
    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
@@ -438,27 +434,22 @@ class _FloatType(_NumericType, sqltypes.Float):
            raise exc.ArgumentError(
                "You must specify both precision and scale or omit "
                "both altogether.")

        super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
        self.scale = scale

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_FloatType, _NumericType, sqltypes.Float])

class _IntegerType(_NumericType, sqltypes.Integer):
    def __init__(self, display_width=None, **kw):
        self.display_width = display_width
        super(_IntegerType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_IntegerType, _NumericType, sqltypes.Integer])

class _StringType(sqltypes.String):
    """Base for MySQL string types."""

    def __init__(self, charset=None, collation=None,
                 ascii=False, binary=False, unicode=False,
                 ascii=False, binary=False,
                 national=False, **kw):
        self.charset = charset

@@ -466,14 +457,16 @@ class _StringType(sqltypes.String):
        kw.setdefault('collation', kw.pop('collate', collation))

        self.ascii = ascii
        self.unicode = unicode
        # We have to munge the 'unicode' param strictly as a dict
        # otherwise 2to3 will turn it into str.
        self.__dict__['unicode'] = kw.get('unicode', False)
        # sqltypes.String does not accept the 'unicode' arg at all.
        if 'unicode' in kw:
            del kw['unicode']
        self.binary = binary
        self.national = national
        super(_StringType, self).__init__(**kw)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[_StringType, sqltypes.String])

class NUMERIC(_NumericType, sqltypes.NUMERIC):
    """MySQL NUMERIC type."""
@@ -533,14 +526,6 @@ class DOUBLE(_FloatType):
    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DOUBLE.

        .. note::

            The :class:`.DOUBLE` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.  Specify
            either ``scale=n`` or ``decimal_return_scale=n`` in order to change
            this scale, or ``asdecimal=False`` to return values directly as
            Python floating points.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

@@ -566,14 +551,6 @@ class REAL(_FloatType, sqltypes.REAL):
    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a REAL.

        .. note::

            The :class:`.REAL` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.  Specify
            either ``scale=n`` or ``decimal_return_scale=n`` in order to change
            this scale, or ``asdecimal=False`` to return values directly as
            Python floating points.

        :param precision: Total digits in this number.  If scale and precision
          are both None, values are stored to limits allowed by the server.

@@ -753,7 +730,7 @@ class BIT(sqltypes.TypeEngine):

        def process(value):
            if value is not None:
                v = 0
                v = 0L
                for i in map(ord, value):
                    v = v << 8 | i
                return v
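                # worked example (illustrative): a two-byte value '\x01\x02'
                # accumulates as ((0 << 8 | 1) << 8) | 2 == 258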
@@ -1059,25 +1036,6 @@ class CHAR(_StringType, sqltypes.CHAR):
        """
        super(CHAR, self).__init__(length=length, **kwargs)

    @classmethod
    def _adapt_string_for_cast(self, type_):
        # copy the given string type into a CHAR
        # for the purposes of rendering a CAST expression
        type_ = sqltypes.to_instance(type_)
        if isinstance(type_, sqltypes.CHAR):
            return type_
        elif isinstance(type_, _StringType):
            return CHAR(
                length=type_.length,
                charset=type_.charset,
                collation=type_.collation,
                ascii=type_.ascii,
                binary=type_.binary,
                unicode=type_.unicode,
                national=False  # not supported in CAST
            )
        else:
            return CHAR(length=type_.length)

class NVARCHAR(_StringType, sqltypes.NVARCHAR):
    """MySQL NVARCHAR type.
@@ -1148,49 +1106,8 @@ class LONGBLOB(sqltypes._Binary):

    __visit_name__ = 'LONGBLOB'

class _EnumeratedValues(_StringType):
    def _init_values(self, values, kw):
        self.quoting = kw.pop('quoting', 'auto')

        if self.quoting == 'auto' and len(values):
            # What quoting character are we using?
            q = None
            for e in values:
                if len(e) == 0:
                    self.quoting = 'unquoted'
                    break
                elif q is None:
                    q = e[0]

                if len(e) == 1 or e[0] != q or e[-1] != q:
                    self.quoting = 'unquoted'
                    break
            else:
                self.quoting = 'quoted'

        if self.quoting == 'quoted':
            util.warn_deprecated(
                'Manually quoting %s value literals is deprecated.  Supply '
                'unquoted values and use the quoting= option in cases of '
                'ambiguity.' % self.__class__.__name__)

            values = self._strip_values(values)

        self._enumerated_values = values
        length = max([len(v) for v in values] + [0])
        return values, length

    @classmethod
    def _strip_values(cls, values):
        strip_values = []
        for a in values:
            if a[0:1] == '"' or a[0:1] == "'":
                # strip enclosing quotes and unquote interior
                a = a[1:-1].replace(a[0] * 2, a[0])
            strip_values.append(a)
        return strip_values

class ENUM(sqltypes.Enum, _EnumeratedValues):
class ENUM(sqltypes.Enum, _StringType):
    """MySQL ENUM type."""

    __visit_name__ = 'ENUM'
@@ -1198,9 +1115,9 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
    def __init__(self, *enums, **kw):
        """Construct an ENUM.

        E.g.::
        Example::

          Column('myenum', ENUM("foo", "bar", "baz"))
          Column('myenum', MSEnum("foo", "bar", "baz"))

        :param enums: The range of valid values for this ENUM.  Values will be
          quoted when generating the schema according to the quoting flag (see
@@ -1244,8 +1161,33 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
          literals for you.  This is a transitional option.

        """
        values, length = self._init_values(enums, kw)
        self.quoting = kw.pop('quoting', 'auto')

        if self.quoting == 'auto' and len(enums):
            # What quoting character are we using?
            q = None
            for e in enums:
                if len(e) == 0:
                    self.quoting = 'unquoted'
                    break
                elif q is None:
                    q = e[0]

                if e[0] != q or e[-1] != q:
                    self.quoting = 'unquoted'
                    break
            else:
                self.quoting = 'quoted'

        if self.quoting == 'quoted':
            util.warn_deprecated(
                'Manually quoting ENUM value literals is deprecated.  Supply '
                'unquoted values and use the quoting= option in cases of '
                'ambiguity.')
            enums = self._strip_enums(enums)

        self.strict = kw.pop('strict', False)
        length = max([len(v) for v in enums] + [0])
        kw.pop('metadata', None)
        kw.pop('schema', None)
        kw.pop('name', None)
@@ -1253,11 +1195,17 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
        kw.pop('native_enum', None)
        kw.pop('inherit_schema', None)
        _StringType.__init__(self, length=length, **kw)
        sqltypes.Enum.__init__(self, *values)
        sqltypes.Enum.__init__(self, *enums)

    def __repr__(self):
        return util.generic_repr(self,
                                 to_inspect=[ENUM, _StringType, sqltypes.Enum])
    @classmethod
    def _strip_enums(cls, enums):
        strip_enums = []
        for a in enums:
            if a[0:1] == '"' or a[0:1] == "'":
                # strip enclosing quotes and unquote interior
                a = a[1:-1].replace(a[0] * 2, a[0])
            strip_enums.append(a)
        return strip_enums
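    # illustrative only: _strip_enums(["'a'", "'it''s'"]) returns
    # ['a', "it's"] -- enclosing quotes removed, doubled interior
    # quote characters collapsed back to one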

    def bind_processor(self, dialect):
        super_convert = super(ENUM, self).bind_processor(dialect)
@@ -1272,13 +1220,12 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
                return value
        return process

    def adapt(self, cls, **kw):
        if issubclass(cls, ENUM):
            kw['strict'] = self.strict
        return sqltypes.Enum.adapt(self, cls, **kw)
    def adapt(self, impltype, **kw):
        kw['strict'] = self.strict
        return sqltypes.Enum.adapt(self, impltype, **kw)


class SET(_EnumeratedValues):
class SET(_StringType):
    """MySQL SET type."""

    __visit_name__ = 'SET'
@@ -1286,16 +1233,15 @@ class SET(_EnumeratedValues):
    def __init__(self, *values, **kw):
        """Construct a SET.

        E.g.::
        Example::

          Column('myset', SET("foo", "bar", "baz"))
          Column('myset', MSSet("'foo'", "'bar'", "'baz'"))

        :param values: The range of valid values for this SET.  Values will be
          quoted when generating the schema according to the quoting flag (see
          below).

        .. versionchanged:: 0.9.0 quoting is applied automatically to
           :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`.
          used exactly as they appear when generating schemas.  Strings must
          be quoted, as in the example above.  Single-quotes are suggested for
          ANSI compatibility and are required for portability to servers with
          ANSI_QUOTES enabled.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence over 'ascii' or 'unicode' short-hand.
@@ -1314,27 +1260,18 @@ class SET(_EnumeratedValues):
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        :param quoting: Defaults to 'auto': automatically determine enum value
          quoting.  If all enum values are surrounded by the same quoting
          character, then use 'quoted' mode.  Otherwise, use 'unquoted' mode.

          'quoted': values in enums are already quoted, they will be used
          directly when generating the schema - this usage is deprecated.

          'unquoted': values in enums are not quoted, they will be escaped and
          surrounded by single quotes when generating the schema.

          Previous versions of this type always required manually quoted
          values to be supplied; future versions will always quote the string
          literals for you.  This is a transitional option.

        .. versionadded:: 0.9.0

        """
        values, length = self._init_values(values, kw)
        self.values = tuple(values)
        self._ddl_values = values

        kw.setdefault('length', length)
        strip_values = []
        for a in values:
            if a[0:1] == '"' or a[0:1] == "'":
                # strip enclosing quotes and unquote interior
                a = a[1:-1].replace(a[0] * 2, a[0])
            strip_values.append(a)

        self.values = strip_values
        kw.setdefault('length', max([len(v) for v in strip_values] + [0]))
        super(SET, self).__init__(**kw)

    def result_processor(self, dialect, coltype):
@@ -1343,10 +1280,14 @@ class SET(_EnumeratedValues):
            # No ',' quoting issues- commas aren't allowed in SET values
            # The bad news:
            # Plenty of driver inconsistencies here.
            if isinstance(value, set):
            if isinstance(value, util.set_types):
                # ..some versions convert '' to an empty set
                if not value:
                    value.add('')
                # ..some return sets.Set, even for pythons
                # that have __builtin__.set
                if not isinstance(value, set):
                    value = set(value)
                return value
            # ...and some versions return strings
            if value is not None:
@@ -1359,7 +1300,7 @@ class SET(_EnumeratedValues):
        super_convert = super(SET, self).bind_processor(dialect)

        def process(value):
            if value is None or isinstance(value, util.int_types + util.string_types):
            if value is None or isinstance(value, (int, long, basestring)):
                pass
            else:
                if None in value:
@@ -1406,9 +1347,6 @@ MSFloat = FLOAT
MSInteger = INTEGER

colspecs = {
    _IntegerType: _IntegerType,
    _NumericType: _NumericType,
    _FloatType: _FloatType,
    sqltypes.Numeric: NUMERIC,
    sqltypes.Float: FLOAT,
    sqltypes.Time: TIME,
@@ -1500,9 +1438,14 @@ class MySQLCompiler(compiler.SQLCompiler):
        elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime,
                                sqltypes.Date, sqltypes.Time)):
            return self.dialect.type_compiler.process(type_)
        elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)):
            adapted = CHAR._adapt_string_for_cast(type_)
            return self.dialect.type_compiler.process(adapted)
        elif isinstance(type_, sqltypes.Text):
            return 'CHAR'
        elif (isinstance(type_, sqltypes.String) and not
              isinstance(type_, (ENUM, SET))):
            if getattr(type_, 'length'):
                return 'CHAR(%s)' % type_.length
            else:
                return 'CHAR'
        elif isinstance(type_, sqltypes._Binary):
            return 'BINARY'
        elif isinstance(type_, sqltypes.NUMERIC):
@@ -1538,7 +1481,7 @@ class MySQLCompiler(compiler.SQLCompiler):
        of a SELECT.

        """
        if isinstance(select._distinct, util.string_types):
        if isinstance(select._distinct, basestring):
            return select._distinct.upper() + " "
        elif select._distinct:
            return "DISTINCT "
@@ -1546,6 +1489,11 @@ class MySQLCompiler(compiler.SQLCompiler):
            return ""

    def visit_join(self, join, asfrom=False, **kwargs):
        # 'JOIN ... ON ...' for inner joins isn't available until 4.0.
        # Apparently < 3.23.17 requires theta joins for inner joins
        # (but not outer).  Not generating these currently, but
        # support can be added, preferably after dialects are
        # refactored to be version-sensitive.
        return ''.join(
            (self.process(join.left, asfrom=True, **kwargs),
             (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "),
@@ -1554,10 +1502,10 @@ class MySQLCompiler(compiler.SQLCompiler):
             self.process(join.onclause, **kwargs)))

    def for_update_clause(self, select):
        if select._for_update_arg.read:
            return " LOCK IN SHARE MODE"
        if select.for_update == 'read':
            return ' LOCK IN SHARE MODE'
        else:
            return " FOR UPDATE"
        return super(MySQLCompiler, self).for_update_clause(select)

    def limit_clause(self, select):
        # MySQL supports:
@@ -1621,9 +1569,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
        constraint_string = super(
            MySQLDDLCompiler, self).create_table_constraints(table)

        # why self.dialect.name and not 'mysql'?  because of drizzle
        is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \
            table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb'
        engine_key = '%s_engine' % self.dialect.name
        is_innodb = table.kwargs.has_key(engine_key) and \
            table.kwargs[engine_key].lower() == 'innodb'

        auto_inc_column = table._autoincrement_column

@@ -1634,7 +1582,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
            constraint_string += ", \n\t"
            constraint_string += "KEY %s (%s)" % (
                self.preparer.quote(
                    "idx_autoinc_%s" % auto_inc_column.name
                    "idx_autoinc_%s" % auto_inc_column.name, None
                ),
                self.preparer.format_column(auto_inc_column)
            )
@@ -1718,8 +1666,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
            text += "UNIQUE "
        text += "INDEX %s ON %s " % (name, table)

        length = index.dialect_options['mysql']['length']
        if length is not None:
        if 'mysql_length' in index.kwargs:
            length = index.kwargs['mysql_length']

            if isinstance(length, dict):
                # length value can be a (column_name --> integer value) mapping
@@ -1740,18 +1688,19 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
            columns = ', '.join(columns)
            text += '(%s)' % columns
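            # illustrative only: Index('idx_a', tbl.c.data, mysql_length=10)
            # renders as CREATE INDEX idx_a ON tbl (data(10))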

        using = index.dialect_options['mysql']['using']
        if using is not None:
            text += " USING %s" % (preparer.quote(using))
        if 'mysql_using' in index.kwargs:
            using = index.kwargs['mysql_using']
            text += " USING %s" % (preparer.quote(using, index.quote))

        return text

    def visit_primary_key_constraint(self, constraint):
        text = super(MySQLDDLCompiler, self).\
            visit_primary_key_constraint(constraint)
        using = constraint.dialect_options['mysql']['using']
        if using:
            text += " USING %s" % (self.preparer.quote(using))
        if "mysql_using" in constraint.kwargs:
            using = constraint.kwargs['mysql_using']
            text += " USING %s" % (
                self.preparer.quote(using, constraint.quote))
        return text

    def visit_drop_index(self, drop):
@@ -1780,11 +1729,24 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
            (self.preparer.format_table(constraint.table),
             qual, const)

    def define_constraint_deferrability(self, constraint):
        if constraint.deferrable is not None:
            util.warn("The 'deferrable' keyword will no longer be ignored by the "
                      "MySQL backend in 0.9 - please adjust so that this keyword is "
                      "not used in conjunction with MySQL.")
        if constraint.initially is not None:
            util.warn("The 'initially' keyword will no longer be ignored by the "
                      "MySQL backend in 0.9 - please adjust so that this keyword is "
                      "not used in conjunction with MySQL.")

        return ""

    def define_constraint_match(self, constraint):
        if constraint.match is not None:
            raise exc.CompileError(
                "MySQL ignores the 'MATCH' keyword while at the same time "
                "causes ON UPDATE/ON DELETE clauses to be ignored.")
            util.warn("MySQL ignores the 'MATCH' keyword while at the same time "
                      "causes ON UPDATE/ON DELETE clauses to be ignored - "
                      "this will be an exception in 0.9.")
        return ""

class MySQLTypeCompiler(compiler.GenericTypeCompiler):
@@ -2024,7 +1986,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
        if not type_.native_enum:
            return super(MySQLTypeCompiler, self).visit_enum(type_)
        else:
            return self._visit_enumerated_values("ENUM", type_, type_.enums)
            return self.visit_ENUM(type_)

    def visit_BLOB(self, type_):
        if type_.length:
@@ -2041,21 +2003,16 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
    def visit_LONGBLOB(self, type_):
        return "LONGBLOB"

    def _visit_enumerated_values(self, name, type_, enumerated_values):
        quoted_enums = []
        for e in enumerated_values:
            quoted_enums.append("'%s'" % e.replace("'", "''"))
        return self._extend_string(type_, {}, "%s(%s)" % (
            name, ",".join(quoted_enums))
        )

    def visit_ENUM(self, type_):
        return self._visit_enumerated_values("ENUM", type_,
                                             type_._enumerated_values)
        quoted_enums = []
        for e in type_.enums:
            quoted_enums.append("'%s'" % e.replace("'", "''"))
        return self._extend_string(type_, {}, "ENUM(%s)" %
                                   ",".join(quoted_enums))

    def visit_SET(self, type_):
        return self._visit_enumerated_values("SET", type_,
                                             type_._enumerated_values)
        return self._extend_string(type_, {}, "SET(%s)" %
                                   ",".join(type_._ddl_values))

    def visit_BOOLEAN(self, type):
        return "BOOL"
@@ -2082,7 +2039,6 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
        return tuple([self.quote_identifier(i) for i in ids if i is not None])


@log.class_logger
class MySQLDialect(default.DefaultDialect):
    """Details of the MySQL dialect.  Not used directly in application code."""

@@ -2114,22 +2070,6 @@ class MySQLDialect(default.DefaultDialect):
    _backslash_escapes = True
    _server_ansiquotes = False

    construct_arguments = [
        (sa_schema.Table, {
            "*": None
        }),
        (sql.Update, {
            "limit": None
        }),
        (sa_schema.PrimaryKeyConstraint, {
            "using": None
        }),
        (sa_schema.Index, {
            "using": None,
            "length": None,
        })
    ]

    def __init__(self, isolation_level=None, **kwargs):
        kwargs.pop('use_ansiquotes', None)  # legacy
        default.DefaultDialect.__init__(self, **kwargs)
@@ -2277,7 +2217,7 @@ class MySQLDialect(default.DefaultDialect):
            have = rs.fetchone() is not None
            rs.close()
            return have
        except exc.DBAPIError as e:
        except exc.DBAPIError, e:
            if self._extract_error_code(e.orig) == 1146:
                return False
            raise
@@ -2286,6 +2226,7 @@ class MySQLDialect(default.DefaultDialect):
                rs.close()

    def initialize(self, connection):
        default.DefaultDialect.initialize(self, connection)
        self._connection_charset = self._detect_charset(connection)
        self._detect_ansiquotes(connection)
        if self._server_ansiquotes:
@@ -2294,8 +2235,6 @@ class MySQLDialect(default.DefaultDialect):
            self.identifier_preparer = self.preparer(self,
                server_ansiquotes=self._server_ansiquotes)

        default.DefaultDialect.initialize(self, connection)

    @property
    def _supports_cast(self):
        return self.server_version_info is None or \
@@ -2392,7 +2331,7 @@ class MySQLDialect(default.DefaultDialect):
            ref_names = spec['foreign']

            con_kw = {}
            for opt in ('onupdate', 'ondelete'):
            for opt in ('name', 'onupdate', 'ondelete'):
                if spec.get(opt, False):
                    con_kw[opt] = spec[opt]

@@ -2565,7 +2504,6 @@ class MySQLDialect(default.DefaultDialect):
        # as of MySQL 5.0.1
        self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode


    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        """Run SHOW CREATE TABLE for a ``Table``."""
@@ -2577,7 +2515,7 @@ class MySQLDialect(default.DefaultDialect):
        rp = None
        try:
            rp = connection.execute(st)
        except exc.DBAPIError as e:
        except exc.DBAPIError, e:
            if self._extract_error_code(e.orig) == 1146:
                raise exc.NoSuchTableError(full_name)
            else:
@@ -2601,7 +2539,7 @@ class MySQLDialect(default.DefaultDialect):
        try:
            try:
                rp = connection.execute(st)
            except exc.DBAPIError as e:
            except exc.DBAPIError, e:
                if self._extract_error_code(e.orig) == 1146:
                    raise exc.NoSuchTableError(full_name)
                else:
@@ -2624,7 +2562,6 @@ class ReflectedState(object):
        self.constraints = []


@log.class_logger
class MySQLTableDefinitionParser(object):
    """Parses the results of a SHOW CREATE TABLE statement."""

@@ -2789,8 +2726,8 @@ class MySQLTableDefinitionParser(object):
            if spec.get(kw, False):
                type_kw[kw] = spec[kw]

        if issubclass(col_type, _EnumeratedValues):
            type_args = _EnumeratedValues._strip_values(type_args)
        if type_ == 'enum':
            type_args = ENUM._strip_enums(type_args)

        type_instance = col_type(*type_args, **type_kw)

@@ -2964,7 +2901,7 @@ class MySQLTableDefinitionParser(object):
        #
        # unique constraints come back as KEYs
        kw = quotes.copy()
        kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
        kw['on'] = 'RESTRICT|CASCASDE|SET NULL|NOACTION'
        self._re_constraint = _re_compile(
            r'  '
            r'CONSTRAINT +'
@@ -3027,6 +2964,8 @@ class MySQLTableDefinitionParser(object):
    _options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
                               'PASSWORD', 'CONNECTION')

log.class_logger(MySQLTableDefinitionParser)
log.class_logger(MySQLDialect)


class _DecodingRowProxy(object):
@@ -3050,8 +2989,11 @@ class _DecodingRowProxy(object):
        item = self.rowproxy[index]
        if isinstance(item, _array):
            item = item.tostring()

        if self.charset and isinstance(item, util.binary_type):
        # Py2K
        if self.charset and isinstance(item, str):
        # end Py2K
        # Py3K
        #if self.charset and isinstance(item, bytes):
            return item.decode(self.charset)
        else:
            return item
@@ -3060,7 +3002,11 @@ class _DecodingRowProxy(object):
        item = getattr(self.rowproxy, attr)
        if isinstance(item, _array):
            item = item.tostring()
        if self.charset and isinstance(item, util.binary_type):
        # Py2K
        if self.charset and isinstance(item, str):
        # end Py2K
        # Py3K
        #if self.charset and isinstance(item, bytes):
            return item.decode(self.charset)
        else:
            return item

@@ -13,7 +13,6 @@
:url: https://github.com/nakagami/CyMySQL

"""
import re

from .mysqldb import MySQLDialect_mysqldb
from .base import (BIT, MySQLDialect)
@@ -26,9 +25,15 @@ class _cymysqlBIT(BIT):

        def process(value):
            if value is not None:
                v = 0
                for i in util.iterbytes(value):
                # Py2K
                v = 0L
                for i in map(ord, value):
                    v = v << 8 | i
                # end Py2K
                # Py3K
                #v = 0
                #for i in value:
                #    v = v << 8 | i
                return v
            return value
        return process
@@ -38,9 +43,7 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    driver = 'cymysql'

    description_encoding = None
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True
    supports_sane_rowcount = False

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
@@ -55,13 +58,7 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile('[.\-]')
        for n in r.split(dbapi_con.server_version):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        version = [int(v) for v in dbapi_con.server_version.split('.')]
        return tuple(version)
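        # illustrative only: a server_version string of '5.5.30' yields
        # (5, 5, 30); suffixed strings such as '5.5.30-log' are tolerated by
        # the regex-based variant above but raise ValueError in the plain
        # split('.') form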
|
||||
|
||||
def _detect_charset(self, connection):
|
||||
|
@ -52,11 +52,8 @@ class _myconnpyBIT(BIT):
|
||||
|
||||
class MySQLDialect_mysqlconnector(MySQLDialect):
|
||||
driver = 'mysqlconnector'
|
||||
|
||||
if util.py2k:
|
||||
supports_unicode_statements = False
|
||||
supports_unicode_statements = True
|
||||
supports_unicode_binds = True
|
||||
|
||||
supports_sane_rowcount = True
|
||||
supports_sane_multi_rowcount = True
|
||||
|
||||
|
@ -48,8 +48,7 @@ from ...connectors.mysqldb import (
|
||||
MySQLDBIdentifierPreparer,
|
||||
MySQLDBConnector
|
||||
)
|
||||
from .base import TEXT
|
||||
from ... import sql
|
||||
|
||||
|
||||
class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
|
||||
pass
|
||||
@ -68,27 +67,4 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
|
||||
statement_compiler = MySQLCompiler_mysqldb
|
||||
preparer = MySQLIdentifierPreparer_mysqldb
|
||||
|
||||
def _check_unicode_returns(self, connection):
|
||||
# work around issue fixed in
|
||||
# https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
|
||||
# specific issue w/ the utf8_bin collation and unicode returns
|
||||
|
||||
has_utf8_bin = connection.scalar(
|
||||
"show collation where %s = 'utf8' and %s = 'utf8_bin'"
|
||||
% (
|
||||
self.identifier_preparer.quote("Charset"),
|
||||
self.identifier_preparer.quote("Collation")
|
||||
))
|
||||
if has_utf8_bin:
|
||||
additional_tests = [
|
||||
sql.collate(sql.cast(
|
||||
sql.literal_column(
|
||||
"'test collated returns'"),
|
||||
TEXT(charset='utf8')), "utf8_bin")
|
||||
]
|
||||
else:
|
||||
additional_tests = []
|
||||
return super(MySQLDBConnector, self)._check_unicode_returns(
|
||||
connection, additional_tests)
|
||||
|
||||
dialect = MySQLDialect_mysqldb
|
||||
|
@ -55,10 +55,10 @@ class MySQLExecutionContext_oursql(MySQLExecutionContext):
|
||||
|
||||
class MySQLDialect_oursql(MySQLDialect):
|
||||
driver = 'oursql'
|
||||
|
||||
if util.py2k:
|
||||
supports_unicode_binds = True
|
||||
supports_unicode_statements = True
|
||||
# Py2K
|
||||
supports_unicode_binds = True
|
||||
supports_unicode_statements = True
|
||||
# end Py2K
|
||||
|
||||
supports_native_decimal = True
|
||||
|
||||
@ -90,11 +90,12 @@ class MySQLDialect_oursql(MySQLDialect):
|
||||
connection.cursor().execute('BEGIN', plain_query=True)
|
||||
|
||||
def _xa_query(self, connection, query, xid):
|
||||
if util.py2k:
|
||||
arg = connection.connection._escape_string(xid)
|
||||
else:
|
||||
charset = self._connection_charset
|
||||
arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
|
||||
# Py2K
|
||||
arg = connection.connection._escape_string(xid)
|
||||
# end Py2K
|
||||
# Py3K
|
||||
# charset = self._connection_charset
|
||||
# arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
|
||||
arg = "'%s'" % arg
|
||||
connection.execution_options(_oursql_plain_query=True).execute(query % arg)
|
||||
|
||||
|
@ -19,7 +19,7 @@ SQLAlchemy zxjdbc dialects pass unicode straight through to the
|
||||
zxjdbc/JDBC layer. To allow multiple character sets to be sent from the
|
||||
MySQL Connector/J JDBC driver, by default SQLAlchemy sets its
|
||||
``characterEncoding`` connection property to ``UTF-8``. It may be
|
||||
overridden via a ``create_engine`` URL parameter.
|
||||
overriden via a ``create_engine`` URL parameter.

"""
import re

@@ -37,7 +37,7 @@ class _ZxJDBCBit(BIT):
            return value
        if isinstance(value, bool):
            return int(value)
        v = 0
        v = 0L
        for i in value:
            v = v << 8 | (i & 0xff)
        value = v

@@ -9,14 +9,14 @@ from sqlalchemy.dialects.oracle import base, cx_oracle, zxjdbc
base.dialect = cx_oracle.dialect

from sqlalchemy.dialects.oracle.base import \
    VARCHAR, NVARCHAR, CHAR, DATE, NUMBER,\
    VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, NUMBER,\
    BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\
    FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL,\
    VARCHAR2, NVARCHAR2, ROWID, dialect


__all__ = (
    'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER',
    'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'DATETIME', 'NUMBER',
    'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
    'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
    'VARCHAR2', 'NVARCHAR2', 'ROWID'

@@ -16,12 +16,12 @@ Connect Arguments

The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
affect the behavior of the dialect regardless of driver in use.

* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
  to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.

* ``optimize_limits`` - defaults to ``False``. see the section on LIMIT/OFFSET.
* *optimize_limits* - defaults to ``False``. see the section on LIMIT/OFFSET.

* ``use_binds_for_limits`` - defaults to ``True``. see the section on LIMIT/OFFSET.
* *use_binds_for_limits* - defaults to ``True``. see the section on LIMIT/OFFSET.
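
As a hedged sketch of how these flags are passed (the DSN and the values
shown are illustrative only)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "oracle://scott:tiger@dsn",
        use_ansi=False,             # emit Oracle 8 style joins
        optimize_limits=True,       # see LIMIT/OFFSET below
        use_binds_for_limits=False)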

Auto Increment Behavior
-----------------------

@@ -59,6 +59,20 @@ against data dictionary data received from Oracle, so unless identifier names ha
truly created as case sensitive (i.e. using quoted names), all lowercase names should be
used on the SQLAlchemy side.
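
A minimal sketch of that convention (names invented): declare lowercase,
unquoted identifiers and let the dialect handle Oracle's internal
upper-case storage::

    from sqlalchemy import Table, Column, Integer, MetaData

    metadata = MetaData()
    # stored as MY_TABLE / ID in Oracle, addressed as lowercase here
    accounts = Table('my_table', metadata,
                     Column('id', Integer, primary_key=True))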

Unicode
-------

.. versionchanged:: 0.6
    SQLAlchemy uses the "native unicode" mode provided as of cx_oracle 5.
    cx_oracle 5.0.2 or greater is recommended for support of NCLOB.
    If not using cx_oracle 5, the NLS_LANG environment variable needs
    to be set in order for the oracle client library to use proper encoding,
    such as "AMERICAN_AMERICA.UTF8".

Also note that Oracle supports unicode data through the NVARCHAR and NCLOB data types.
When using the SQLAlchemy Unicode and UnicodeText types, these DDL types will be used
within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still
requires NLS_LANG to be set.
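
A hedged sketch of that DDL behavior (table and column names invented)::

    from sqlalchemy import Table, Column, MetaData, Unicode, UnicodeText

    metadata = MetaData()
    docs = Table('docs', metadata,
                 Column('title', Unicode(200)),  # emitted as NVARCHAR2(200)
                 Column('body', UnicodeText))    # emitted as NCLOB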

LIMIT/OFFSET Support
--------------------

@@ -85,43 +99,6 @@ http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset with
a window function.
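
For orientation, a hedged sketch of the default ROWNUM-based rewriting
(``some_table`` is assumed to exist; the exact SQL varies with
``optimize_limits`` and ``use_binds_for_limits``)::

    from sqlalchemy import select

    stmt = select([some_table]).limit(10).offset(20)

    # is wrapped roughly as:
    #   SELECT * FROM (
    #       SELECT anon.*, ROWNUM AS ora_rn FROM (...) anon
    #       WHERE ROWNUM <= 30
    #   ) WHERE ora_rn > 20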

.. _oracle_returning:

RETURNING Support
-----------------

The Oracle database supports a limited form of RETURNING, in order to retrieve result
sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's
RETURNING..INTO syntax only supports one row being returned, as it relies upon
OUT parameters in order to function. In addition, supported DBAPIs have further
limitations (see :ref:`cx_oracle_returning`).

SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT
and sometimes an UPDATE statement in order to fetch newly generated primary key values
and other SQL defaults and expressions, is normally enabled on the Oracle
backend. By default, "implicit returning" typically only fetches the value of a
single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment
a sequence within an INSERT statement and get the value back at the same time.
To disable this feature across the board, specify ``implicit_returning=False`` to
:func:`.create_engine`::

    engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)

Implicit returning can also be disabled on a table-by-table basis as a table option::

    # Core Table
    my_table = Table("my_table", metadata, ..., implicit_returning=False)


    # declarative
    class MyClass(Base):
        __tablename__ = 'my_table'
        __table_args__ = {"implicit_returning": False}

.. seealso::

    :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning.

ON UPDATE CASCADE
-----------------

@@ -157,42 +134,9 @@ Synonym/DBLINK Reflection

When using reflection with Table objects, the dialect can optionally search for tables
indicated by synonyms, either in local or remote schemas or accessed over DBLINK,
by passing the flag ``oracle_resolve_synonyms=True`` as a
keyword argument to the :class:`.Table` construct::

    some_table = Table('some_table', autoload=True,
                       autoload_with=some_engine,
                       oracle_resolve_synonyms=True)

When this flag is set, the given name (such as ``some_table`` above) will
be searched not just in the ``ALL_TABLES`` view, but also within the
``ALL_SYNONYMS`` view to see if this name is actually a synonym to another name.
If the synonym is located and refers to a DBLINK, the oracle dialect knows
how to locate the table's information using DBLINK syntax (e.g. ``@dblink``).

``oracle_resolve_synonyms`` is accepted wherever reflection arguments are
accepted, including methods such as :meth:`.MetaData.reflect` and
:meth:`.Inspector.get_columns`.

If synonyms are not in use, this flag should be left disabled.
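
A hedged sketch of the :meth:`.MetaData.reflect` form (``some_engine`` is
assumed to be a connected Oracle engine)::

    from sqlalchemy import MetaData

    meta = MetaData()
    # reflect all visible tables, following synonyms as well
    meta.reflect(bind=some_engine, oracle_resolve_synonyms=True)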

DateTime Compatibility
----------------------

Oracle has no datatype known as ``DATETIME``, it instead has only ``DATE``,
which can actually store a date and time value. For this reason, the Oracle
dialect provides a type :class:`.oracle.DATE` which is a subclass of
:class:`.DateTime`. This type has no special behavior, and is only
present as a "marker" for this type; additionally, when a database column
is reflected and the type is reported as ``DATE``, the time-supporting
:class:`.oracle.DATE` type is used.

.. versionchanged:: 0.9.4 Added :class:`.oracle.DATE` to subclass
    :class:`.DateTime`. This is a change as previous versions
    would reflect a ``DATE`` column as :class:`.types.DATE`, which subclasses
    :class:`.Date`. The only significance here is for schemes that are
    examining the type of column for use in special Python translations or
    for migrating schemas to other database backends.
by passing the flag oracle_resolve_synonyms=True as a
keyword argument to the Table construct. If synonyms are not in use
this flag should be left off.

"""

@@ -202,8 +146,8 @@ from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
from sqlalchemy import types as sqltypes
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
    BLOB, CLOB, TIMESTAMP, FLOAT

RESERVED_WORDS = \

@@ -276,22 +220,6 @@ class BFILE(sqltypes.LargeBinary):
class LONG(sqltypes.Text):
    __visit_name__ = 'LONG'

class DATE(sqltypes.DateTime):
    """Provide the oracle DATE type.

    This type has no special Python behavior, except that it subclasses
    :class:`.types.DateTime`; this is to suit the fact that the Oracle
    ``DATE`` type supports a time value.

    .. versionadded:: 0.9.4

    """
    __visit_name__ = 'DATE'

    def _compare_type_affinity(self, other):
        return other._type_affinity in (sqltypes.DateTime, sqltypes.Date)


class INTERVAL(sqltypes.TypeEngine):
    __visit_name__ = 'INTERVAL'

@@ -340,7 +268,6 @@ class _OracleBoolean(sqltypes.Boolean):
colspecs = {
    sqltypes.Boolean: _OracleBoolean,
    sqltypes.Interval: INTERVAL,
    sqltypes.DateTime: DATE
}

ischema_names = {

@@ -536,13 +463,8 @@ class OracleCompiler(compiler.SQLCompiler):
            return compiler.SQLCompiler.visit_join(self, join, **kwargs)
        else:
            kwargs['asfrom'] = True
            if isinstance(join.right, expression.FromGrouping):
                right = join.right.element
            else:
                right = join.right
            return self.process(join.left, **kwargs) + \
                ", " + self.process(right, **kwargs)
                ", " + self.process(join.right, **kwargs)

    def _get_nonansi_join_whereclause(self, froms):
        clauses = []

@@ -551,9 +473,9 @@ class OracleCompiler(compiler.SQLCompiler):
            if join.isouter:
                def visit_binary(binary):
                    if binary.operator == sql_operators.eq:
                        if join.right.is_derived_from(binary.left.table):
                        if binary.left.table is join.right:
                            binary.left = _OuterJoinColumn(binary.left)
                        elif join.right.is_derived_from(binary.right.table):
                        elif binary.right.table is join.right:
                            binary.right = _OuterJoinColumn(binary.right)
                clauses.append(visitors.cloned_traverse(join.onclause, {},
                                {'binary': visit_binary}))

@@ -563,8 +485,6 @@ class OracleCompiler(compiler.SQLCompiler):
            for j in join.left, join.right:
                if isinstance(j, expression.Join):
                    visit_join(j)
                elif isinstance(j, expression.FromGrouping):
                    visit_join(j.element)

        for f in froms:
            if isinstance(f, expression.Join):

@@ -597,6 +517,7 @@ class OracleCompiler(compiler.SQLCompiler):
            return self.process(alias.original, **kwargs)

    def returning_clause(self, stmt, returning_cols):

        columns = []
        binds = []
        for i, column in enumerate(expression._select_iterables(returning_cols)):

@@ -670,7 +591,7 @@ class OracleCompiler(compiler.SQLCompiler):

        # If needed, add the ora_rn, and wrap again with offset.
        if select._offset is None:
            limitselect._for_update_arg = select._for_update_arg
            limitselect.for_update = select.for_update
            select = limitselect
        else:
            limitselect = limitselect.column(

@@ -689,7 +610,7 @@ class OracleCompiler(compiler.SQLCompiler):
            offsetselect.append_whereclause(
                sql.literal_column("ora_rn") > offset_value)

            offsetselect._for_update_arg = select._for_update_arg
            offsetselect.for_update = select.for_update
            select = offsetselect

        kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)

@@ -701,19 +622,10 @@ class OracleCompiler(compiler.SQLCompiler):
    def for_update_clause(self, select):
        if self.is_subquery():
            return ""

        tmp = ' FOR UPDATE'

        if select._for_update_arg.of:
            tmp += ' OF ' + ', '.join(
                self.process(elem) for elem in
                select._for_update_arg.of
            )

        if select._for_update_arg.nowait:
            tmp += " NOWAIT"

        return tmp
        elif select.for_update == "nowait":
            return " FOR UPDATE NOWAIT"
        else:
            return super(OracleCompiler, self).for_update_clause(select)


class OracleDDLCompiler(compiler.DDLCompiler):

@@ -741,14 +653,14 @@ class OracleDDLCompiler(compiler.DDLCompiler):
class OracleIdentifierPreparer(compiler.IdentifierPreparer):

    reserved_words = set([x.lower() for x in RESERVED_WORDS])
    illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
    illegal_initial_characters = set(xrange(0, 10)).union(["_", "$"])

    def _bindparam_requires_quotes(self, value):
        """Return True if the given identifier requires quoting."""
        lc_value = value.lower()
        return (lc_value in self.reserved_words
                or value[0] in self.illegal_initial_characters
                or not self.legal_characters.match(util.text_type(value))
                or not self.legal_characters.match(unicode(value))
                )

    def format_savepoint(self, savepoint):

@@ -792,10 +704,6 @@ class OracleDialect(default.DefaultDialect):

    reflection_options = ('oracle_resolve_synonyms', )

    construct_arguments = [
        (sa_schema.Table, {"resolve_synonyms": False})
    ]

    def __init__(self,
                 use_ansi=True,
                 optimize_limits=False,

@@ -856,9 +764,10 @@ class OracleDialect(default.DefaultDialect):
    def normalize_name(self, name):
        if name is None:
            return None
        if util.py2k:
            if isinstance(name, str):
                name = name.decode(self.encoding)
        # Py2K
        if isinstance(name, str):
            name = name.decode(self.encoding)
        # end Py2K
        if name.upper() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.lower()

@@ -870,15 +779,16 @@ class OracleDialect(default.DefaultDialect):
            return None
        elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()):
            name = name.upper()
        if util.py2k:
            if not self.supports_unicode_binds:
                name = name.encode(self.encoding)
            else:
                name = unicode(name)
        # Py2K
        if not self.supports_unicode_binds:
            name = name.encode(self.encoding)
        else:
            name = unicode(name)
        # end Py2K
        return name

    def _get_default_schema_name(self, connection):
        return self.normalize_name(connection.execute('SELECT USER FROM DUAL').scalar())
        return self.normalize_name(connection.execute(u'SELECT USER FROM DUAL').scalar())

    def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
        """search for a local synonym matching the given desired owner/name.

@@ -961,7 +871,7 @@ class OracleDialect(default.DefaultDialect):
    def get_table_names(self, connection, schema=None, **kw):
        schema = self.denormalize_name(schema or self.default_schema_name)

        # note that table_names() isn't loading DBLINKed or synonym'ed tables
        # note that table_names() isnt loading DBLINKed or synonym'ed tables
        if schema is None:
            schema = self.default_schema_name
        s = sql.text(

@@ -1257,7 +1167,7 @@ class OracleDialect(default.DefaultDialect):
            local_cols.append(local_column)
            remote_cols.append(remote_column)

        return list(fkeys.values())
        return fkeys.values()

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None,

@@ -1277,9 +1187,7 @@ class OracleDialect(default.DefaultDialect):

        rp = connection.execute(sql.text(text), **params).scalar()
        if rp:
            if util.py2k:
                rp = rp.decode(self.encoding)
            return rp
            return rp.decode(self.encoding)
        else:
            return None

@@ -17,32 +17,26 @@ Additional Connect Arguments

When connecting with ``dbname`` present, the host, port, and dbname tokens are
converted to a TNS name using
the cx_oracle ``makedsn()`` function. Otherwise, the host token is taken
the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken
directly as a TNS name.

Additional arguments which may be specified either as query string arguments
on the URL, or as keyword arguments to :func:`.create_engine()` are:
on the URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:

* ``allow_twophase`` - enable two-phase transactions. Defaults to ``True``.
* allow_twophase - enable two-phase transactions. Defaults to ``True``.

* ``arraysize`` - set the cx_oracle.arraysize value on cursors, defaulted
  to 50. This setting is significant with cx_Oracle as the contents of LOB
  objects are only readable within a "live" row (e.g. within a batch of
  50 rows).
* arraysize - set the cx_oracle.arraysize value on cursors, in SQLAlchemy
  it defaults to 50. See the section on "LOB Objects" below.

* ``auto_convert_lobs`` - defaults to True; See :ref:`cx_oracle_lob`.
* auto_convert_lobs - defaults to True, see the section on LOB objects.

* ``auto_setinputsizes`` - the cx_oracle.setinputsizes() call is issued for
* auto_setinputsizes - the cx_oracle.setinputsizes() call is issued for
  all bind parameters. This is required for LOB datatypes but can be
  disabled to reduce overhead. Defaults to ``True``. Specific types
  can be excluded from this process using the ``exclude_setinputsizes``
  parameter.

* ``coerce_to_unicode`` - see :ref:`cx_oracle_unicode` for detail.

* ``coerce_to_decimal`` - see :ref:`cx_oracle_numeric` for detail.

* ``exclude_setinputsizes`` - a tuple or list of string DBAPI type names to
* exclude_setinputsizes - a tuple or list of string DBAPI type names to
  be excluded from the "auto setinputsizes" feature. The type names here
  must match DBAPI types that are found in the "cx_Oracle" module namespace,
  such as cx_Oracle.UNICODE, cx_Oracle.NCLOB, etc. Defaults to

@@ -51,106 +45,25 @@ on the URL, or as keyword arguments to :func:`.create_engine()` are:
  .. versionadded:: 0.8 specific DBAPI types can be excluded from the
     auto_setinputsizes feature via the exclude_setinputsizes attribute.

* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or alternatively
* mode - This is given the string value of SYSDBA or SYSOPER, or alternatively
  an integer value. This value is only available as a URL query string
  argument.

* ``threaded`` - enable multithreaded access to cx_oracle connections. Defaults
* threaded - enable multithreaded access to cx_oracle connections. Defaults
  to ``True``. Note that this is the opposite default of the cx_Oracle DBAPI
  itself.
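
A hedged sketch of both spellings (DSN and values are illustrative)::

    from sqlalchemy import create_engine

    # as keyword arguments to create_engine()...
    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@dsn",
        arraysize=100, threaded=False)

    # ...or as query string arguments on the URL
    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@dsn?arraysize=100&threaded=False")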

.. _cx_oracle_unicode:

Unicode
-------

The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the ability
to return string results as Python unicode objects natively.
cx_oracle 5 fully supports Python unicode objects. SQLAlchemy will pass
all unicode strings directly to cx_oracle, and additionally uses an output
handler so that all string based result values are returned as unicode as well.
Generally, the ``NLS_LANG`` environment variable determines the nature
of the encoding to be used.

When used in Python 3, cx_Oracle returns all strings as Python unicode objects
(that is, plain ``str`` in Python 3). In Python 2, it will return as Python
unicode those column values that are of type ``NVARCHAR`` or ``NCLOB``. For
column values that are of type ``VARCHAR`` or other non-unicode string types,
it will return values as Python strings (e.g. bytestrings).

The cx_Oracle SQLAlchemy dialect presents two different options for the use case of
returning ``VARCHAR`` column values as Python unicode objects under Python 2:

* the cx_Oracle DBAPI has the ability to coerce all string results to Python
  unicode objects unconditionally using output type handlers. This has
  the advantage that the unicode conversion is global to all statements
  at the cx_Oracle driver level, meaning it works with raw textual SQL
  statements that have no typing information associated. However, this system
  has been observed to incur significant performance overhead, not only because
  it takes effect for all string values unconditionally, but also because cx_Oracle under
  Python 2 seems to use a pure-Python function call in order to do the
  decode operation, which under cPython can be orders of magnitude slower
  than doing it using C functions alone.

* SQLAlchemy has unicode-decoding services built in, and when using SQLAlchemy's
  C extensions, these functions do not use any Python function calls and
  are very fast. The disadvantage to this approach is that the unicode
  conversion only takes effect for statements where the :class:`.Unicode` type
  or :class:`.String` type with ``convert_unicode=True`` is explicitly
  associated with the result column. This is the case for any ORM or Core
  query or SQL expression as well as for a :func:`.text` construct that specifies
  output column types, so in the vast majority of cases this is not an issue.
  However, when sending a completely raw string to :meth:`.Connection.execute`,
  this typing information isn't present, unless the string is handled
  within a :func:`.text` construct that adds typing information.

As of version 0.9.2 of SQLAlchemy, the default approach is to use SQLAlchemy's
typing system. This keeps cx_Oracle's expensive Python 2 approach
disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy detects
that cx_Oracle is returning unicode objects natively and cx_Oracle's system
is used.

To re-enable cx_Oracle's output type handler under Python 2, the
``coerce_to_unicode=True`` flag (new in 0.9.4) can be passed to
:func:`.create_engine`::

    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_unicode=True)

Alternatively, to run a pure string SQL statement and get ``VARCHAR`` results
as Python unicode under Python 2 without using cx_Oracle's native handlers,
the :func:`.text` feature can be used::

    from sqlalchemy import text, Unicode
    result = conn.execute(text("select username from user").columns(username=Unicode))

.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used for
   unicode results of non-unicode datatypes in Python 2, after they were identified as a major
   performance bottleneck. SQLAlchemy's own unicode facilities are used
   instead.

.. versionadded:: 0.9.4 Added the ``coerce_to_unicode`` flag, to re-enable
   cx_Oracle's outputtypehandler and revert to pre-0.9.2 behavior.

.. _cx_oracle_returning:

RETURNING Support
-----------------

The cx_oracle DBAPI supports a limited subset of Oracle's already limited RETURNING support.
Typically, results can only be guaranteed for at most one column being returned;
this is the typical case when SQLAlchemy uses RETURNING to get just the value of a
primary-key-associated sequence value. Additional column expressions will
cause problems in a non-determinative way, due to cx_oracle's lack of support for
the OCI_DATA_AT_EXEC API which is required for more complex RETURNING scenarios.

For this reason, stability may be enhanced by disabling RETURNING support completely;
SQLAlchemy otherwise will use RETURNING to fetch newly sequence-generated
primary keys. As illustrated in :ref:`oracle_returning`::

    engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)

.. seealso::

    http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING

    http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary

.. _cx_oracle_lob:
Note that this behavior is disabled when Oracle 8 is detected, as it has been
observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.

LOB Objects
-----------

@@ -162,7 +75,7 @@ like result.fetchmany() and result.fetchall(). This means that by default, LOB
objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
cursor is broken.

To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`.
To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
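
A minimal sketch of that flag (DSN invented); with conversion disabled,
LOB columns come back as raw cx_Oracle LOB handles and must be read while
the owning cursor is still live::

    from sqlalchemy import create_engine

    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@dsn", auto_convert_lobs=False)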

Two Phase Transaction Support
-----------------------------

@@ -195,13 +108,12 @@ the application can make one of several choices:

* For ad-hoc two-phase operations without disabling pooling, the DBAPI
  connection in use can be evicted from the connection pool using the
  :meth:`.Connection.detach` method.
  :class:`.Connection.detach` method.

.. versionchanged:: 0.8.0b2,0.7.10
   Support for cx_oracle prepared transactions has been implemented
   and tested.
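
A hedged sketch of the eviction approach named above (``engine`` is assumed
to be an existing cx_oracle engine)::

    conn = engine.connect()
    conn.detach()   # this DBAPI connection will not be returned to the pool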

.. _cx_oracle_numeric:

Precision Numerics
------------------

@@ -224,7 +136,8 @@ If precision numerics aren't required, the decimal handling
can be disabled by passing the flag ``coerce_to_decimal=False``
to :func:`.create_engine`::

    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False)
    engine = create_engine("oracle+cx_oracle://dsn",
                        coerce_to_decimal=False)

.. versionadded:: 0.7.6
    Add the ``coerce_to_decimal`` flag.

@@ -255,15 +168,15 @@ The "decimal point is present" logic itself is also sensitive to
locale. Under OCI_, this is controlled by the NLS_LANG
environment variable. Upon first connection, the dialect runs a
test to determine the current "decimal" character, which can be
a comma "," for European locales. From that point forward the
a comma "," for european locales. From that point forward the
outputtypehandler uses that character to represent a decimal
point. Note that cx_oracle 5.0.3 or greater is required
when dealing with numerics with locale settings that don't use
a period "." as the decimal character.

.. versionchanged:: 0.6.6
    The outputtypehandler supports the case where the locale uses a
    comma "," character to represent a decimal point.
    The outputtypehandler uses a comma "," character to represent
    a decimal point.

.. _OCI: http://www.oracle.com/technetwork/database/features/oci/index.html

@@ -298,7 +211,10 @@ class _OracleNumeric(sqltypes.Numeric):

        if dialect.supports_native_decimal:
            if self.asdecimal:
                fstring = "%%.%df" % self._effective_decimal_return_scale
                if self.scale is None:
                    fstring = "%.10f"
                else:
                    fstring = "%%.%df" % self.scale

                def to_decimal(value):
                    if value is None:

@@ -352,17 +268,20 @@ class _LOBMixin(object):


class _NativeUnicodeMixin(object):
    if util.py2k:
        def bind_processor(self, dialect):
            if dialect._cx_oracle_with_unicode:
                def process(value):
                    if value is None:
                        return value
                    else:
                        return unicode(value)
                return process
            else:
                return super(_NativeUnicodeMixin, self).bind_processor(dialect)
    # Py3K
    #pass
    # Py2K
    def bind_processor(self, dialect):
        if dialect._cx_oracle_with_unicode:
            def process(value):
                if value is None:
                    return value
                else:
                    return unicode(value)
            return process
        else:
            return super(_NativeUnicodeMixin, self).bind_processor(dialect)
    # end Py2K

    # we apply a connection output handler that returns
    # unicode in all cases, so the "native_unicode" flag
@@ -446,8 +365,7 @@ class _OracleRowid(oracle.ROWID):


class OracleCompiler_cx_oracle(OracleCompiler):
    def bindparam_string(self, name, **kw):
        quote = getattr(name, 'quote', None)
    def bindparam_string(self, name, quote=None, **kw):
        if quote is True or quote is not False and \
                self.preparer._bindparam_requires_quotes(name):
            quoted_name = '"%s"' % name

@@ -575,11 +493,11 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or
    """
    def __init__(self, *arg, **kw):
        OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
        self.statement = util.text_type(self.statement)
        self.statement = unicode(self.statement)

    def _execute_scalar(self, stmt):
        return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
            _execute_scalar(util.text_type(stmt))
            _execute_scalar(unicode(stmt))


class ReturningResultProxy(_result.FullyBufferedResultProxy):

@@ -591,6 +509,7 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy):

    def _cursor_description(self):
        returning = self.context.compiled.returning

        return [
            ("ret_%d" % i, None)
            for i, col in enumerate(returning)

@@ -610,6 +529,7 @@ class OracleDialect_cx_oracle(OracleDialect):
    colspecs = colspecs = {
        sqltypes.Numeric: _OracleNumeric,
        sqltypes.Date: _OracleDate,  # generic type, assume datetime.date is desired
        oracle.DATE: oracle.DATE,  # non generic type - passthru
        sqltypes.LargeBinary: _OracleBinary,
        sqltypes.Boolean: oracle._OracleBoolean,
        sqltypes.Interval: _OracleInterval,

@@ -642,7 +562,6 @@ class OracleDialect_cx_oracle(OracleDialect):
                 threaded=True,
                 allow_twophase=True,
                 coerce_to_decimal=True,
                 coerce_to_unicode=False,
                 arraysize=50, **kwargs):
        OracleDialect.__init__(self, **kwargs)
        self.threaded = threaded

@@ -671,11 +590,6 @@ class OracleDialect_cx_oracle(OracleDialect):
        self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
        self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0)

        self.coerce_to_unicode = (
            self.cx_oracle_ver >= (5, 0) and
            coerce_to_unicode
        )

        self.supports_native_decimal = (
            self.cx_oracle_ver >= (5, 0) and
            coerce_to_decimal

@@ -693,23 +607,19 @@ class OracleDialect_cx_oracle(OracleDialect):
            self.supports_unicode_statements = True
            self.supports_unicode_binds = True
            self._cx_oracle_with_unicode = True

            if util.py2k:
                # There's really no reason to run with WITH_UNICODE under Python 2.x.
                # Give the user a hint.
                util.warn(
                    "cx_Oracle is compiled under Python 2.xx using the "
                    "WITH_UNICODE flag.  Consider recompiling cx_Oracle "
                    "without this flag, which is in no way necessary for full "
                    "support of Unicode. Otherwise, all string-holding bind "
                    "parameters must be explicitly typed using SQLAlchemy's "
                    "String type or one of its subtypes,"
                    "or otherwise be passed as Python unicode. "
                    "Plain Python strings passed as bind parameters will be "
                    "silently corrupted by cx_Oracle."
                )
                self.execution_ctx_cls = \
                    OracleExecutionContext_cx_oracle_with_unicode
            # Py2K
            # There's really no reason to run with WITH_UNICODE under Python 2.x.
            # Give the user a hint.
            util.warn("cx_Oracle is compiled under Python 2.xx using the "
                      "WITH_UNICODE flag.  Consider recompiling cx_Oracle without "
                      "this flag, which is in no way necessary for full support of Unicode. "
                      "Otherwise, all string-holding bind parameters must "
                      "be explicitly typed using SQLAlchemy's String type or one of its subtypes,"
                      "or otherwise be passed as Python unicode. Plain Python strings "
                      "passed as bind parameters will be silently corrupted by cx_Oracle."
                      )
            self.execution_ctx_cls = OracleExecutionContext_cx_oracle_with_unicode
            # end Py2K
        else:
            self._cx_oracle_with_unicode = False

@@ -741,7 +651,7 @@ class OracleDialect_cx_oracle(OracleDialect):

    def _detect_decimal_char(self, connection):
        """detect if the decimal separator character is not '.', as
        is the case with European locale settings for NLS_LANG.
        is the case with european locale settings for NLS_LANG.

        cx_oracle itself uses similar logic when it formats Python
        Decimal objects to strings on the bind side (as of 5.0.3),

@@ -820,9 +730,8 @@ class OracleDialect_cx_oracle(OracleDialect):
                    outconverter=self._detect_decimal,
                    arraysize=cursor.arraysize)
            # allow all strings to come back natively as Unicode
            elif self.coerce_to_unicode and \
                    defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
                return cursor.var(util.text_type, size, cursor.arraysize)
            elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
                return cursor.var(unicode, size, cursor.arraysize)

        def on_connect(conn):
            conn.outputtypehandler = output_type_handler

@@ -857,19 +766,20 @@ class OracleDialect_cx_oracle(OracleDialect):
            twophase=self.allow_twophase,
        )

        if util.py2k:
            if self._cx_oracle_with_unicode:
                for k, v in opts.items():
                    if isinstance(v, str):
                        opts[k] = unicode(v)
            else:
                for k, v in opts.items():
                    if isinstance(v, unicode):
                        opts[k] = str(v)
        # Py2K
        if self._cx_oracle_with_unicode:
            for k, v in opts.items():
                if isinstance(v, str):
                    opts[k] = unicode(v)
        else:
            for k, v in opts.items():
                if isinstance(v, unicode):
                    opts[k] = str(v)
        # end Py2K

        if 'mode' in url.query:
            opts['mode'] = url.query['mode']
            if isinstance(opts['mode'], util.string_types):
            if isinstance(opts['mode'], basestring):
                mode = opts['mode'].upper()
                if mode == 'SYSDBA':
                    opts['mode'] = self.dbapi.SYSDBA

@@ -910,11 +820,6 @@ class OracleDialect_cx_oracle(OracleDialect):
        id = random.randint(0, 2 ** 128)
        return (0x1234, "%032x" % id, "%032x" % 9)

    def do_executemany(self, cursor, statement, parameters, context=None):
        if isinstance(parameters, tuple):
            parameters = list(parameters)
        cursor.executemany(statement, parameters)

    def do_begin_twophase(self, connection, xid):
        connection.connection.begin(*xid)

@@ -95,8 +95,8 @@ class OracleExecutionContext_zxjdbc(OracleExecutionContext):
        try:
            try:
                rrs = self.statement.__statement__.getReturnResultSet()
                next(rrs)
            except SQLException as sqle:
                rrs.next()
            except SQLException, sqle:
                msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
                if sqle.getSQLState() is not None:
                    msg += ' [SQLState: %s]' % sqle.getSQLState()

@@ -11,11 +11,9 @@ base.dialect = psycopg2.dialect
from .base import \
    INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
    INET, CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME, \
    DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
    TSVECTOR
    DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All
from .constraints import ExcludeConstraint
from .hstore import HSTORE, hstore
from .json import JSON, JSONElement
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
    TSTZRANGE

@@ -25,5 +23,5 @@ __all__ = (
    'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
    'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
    'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONElement'
    'TSRANGE', 'TSTZRANGE'
)

@@ -76,132 +76,36 @@ Valid values for ``isolation_level`` include:

The :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect also offers the special level ``AUTOCOMMIT``. See
:ref:`psycopg2_isolation_level` for details.
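
A hedged sketch of selecting that level on a single connection (the URL is
invented; ``AUTOCOMMIT`` is a psycopg2-dialect-specific value)::

    from sqlalchemy import create_engine

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    conn = engine.connect().execution_options(isolation_level="AUTOCOMMIT")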

.. _postgresql_schema_reflection:

Remote-Schema Table Introspection and Postgresql search_path
------------------------------------------------------------
Remote / Cross-Schema Table Introspection
-----------------------------------------

The Postgresql dialect can reflect tables from any schema. The
:paramref:`.Table.schema` argument, or alternatively the
:paramref:`.MetaData.reflect.schema` argument determines which schema will
be searched for the table or tables. The reflected :class:`.Table` objects
will in all cases retain this ``.schema`` attribute as was specified. However,
with regards to tables which these :class:`.Table` objects refer to via
foreign key constraint, a decision must be made as to how the ``.schema``
is represented in those remote tables, in the case where that remote
schema name is also a member of the current
`Postgresql search path <http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_.
Tables can be introspected from any accessible schema, including
inter-schema foreign key relationships. However, care must be taken
when specifying the "schema" argument for a given :class:`.Table`, when
the given schema is also present in PostgreSQL's ``search_path`` variable
for the current connection.

By default, the Postgresql dialect mimics the behavior encouraged by
Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function
returns a sample definition for a particular foreign key constraint,
omitting the referenced schema name from that definition when the name is
also in the Postgresql schema search path. The interaction below
illustrates this behavior::
If a FOREIGN KEY constraint reports that the remote table's schema is within
the current ``search_path``, the "schema" attribute of the resulting
:class:`.Table` will be set to ``None``, unless the actual schema of the
remote table matches that of the referencing table, and the "schema" argument
was explicitly stated on the referencing table.

    test=> CREATE TABLE test_schema.referred(id INTEGER PRIMARY KEY);
    CREATE TABLE
    test=> CREATE TABLE referring(
    test(>     id INTEGER PRIMARY KEY,
    test(>     referred_id INTEGER REFERENCES test_schema.referred(id));
    CREATE TABLE
    test=> SET search_path TO public, test_schema;
    test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
    test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
    test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
    test-> WHERE c.relname='referring' AND r.contype = 'f'
    test-> ;
                   pg_get_constraintdef
    ---------------------------------------------------
     FOREIGN KEY (referred_id) REFERENCES referred(id)
    (1 row)
The best practice here is to not use the ``schema`` argument
on :class:`.Table` for any schemas that are present in ``search_path``.
``search_path`` defaults to "public", but care should be taken
to inspect the actual value using::

Above, we created a table ``referred`` as a member of the remote schema ``test_schema``, however
when we added ``test_schema`` to the PG ``search_path`` and then asked ``pg_get_constraintdef()``
for the ``FOREIGN KEY`` syntax, ``test_schema`` was not included in the
output of the function.
    SHOW search_path;

On the other hand, if we set the search path back to the typical default
of ``public``::
.. versionchanged:: 0.7.3
    Prior to this version, cross-schema foreign keys when the schemas
    were also in the ``search_path`` could make an incorrect assumption
    if the schemas were explicitly stated on each :class:`.Table`.

    test=> SET search_path TO public;
    SET

The same query against ``pg_get_constraintdef()`` now returns the fully
schema-qualified name for us::

    test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
    test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
    test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
    test-> WHERE c.relname='referring' AND r.contype = 'f';
                   pg_get_constraintdef
    ---------------------------------------------------------------
     FOREIGN KEY (referred_id) REFERENCES test_schema.referred(id)
    (1 row)

SQLAlchemy will by default use the return value of ``pg_get_constraintdef()``
in order to determine the remote schema name. That is, if our ``search_path``
were set to include ``test_schema``, and we invoked a table
reflection process as follows::

    >>> from sqlalchemy import Table, MetaData, create_engine
    >>> engine = create_engine("postgresql://scott:tiger@localhost/test")
    >>> with engine.connect() as conn:
    ...     conn.execute("SET search_path TO test_schema, public")
    ...     meta = MetaData()
    ...     referring = Table('referring', meta, autoload=True, autoload_with=conn)
    ...
    <sqlalchemy.engine.result.ResultProxy object at 0x101612ed0>

The above process would deliver to the :attr:`.MetaData.tables` collection
``referred`` table named **without** the schema::

    >>> meta.tables['referred'].schema is None
    True

To alter the behavior of reflection such that the referred schema is maintained
regardless of the ``search_path`` setting, use the ``postgresql_ignore_search_path``
option, which can be specified as a dialect-specific argument to both
:class:`.Table` as well as :meth:`.MetaData.reflect`::

    >>> with engine.connect() as conn:
    ...     conn.execute("SET search_path TO test_schema, public")
    ...     meta = MetaData()
    ...     referring = Table('referring', meta, autoload=True, autoload_with=conn,
    ...                       postgresql_ignore_search_path=True)
    ...
    <sqlalchemy.engine.result.ResultProxy object at 0x1016126d0>

We will now have ``test_schema.referred`` stored as schema-qualified::

    >>> meta.tables['test_schema.referred'].schema
    'test_schema'

.. sidebar:: Best Practices for Postgresql Schema reflection

    The description of Postgresql schema reflection behavior is complex, and is
    the product of many years of dealing with widely varied use cases and user preferences.
    But in fact, there's no need to understand any of it if you just stick to the simplest
    use pattern: leave the ``search_path`` set to its default of ``public`` only, never refer
    to the name ``public`` as an explicit schema name otherwise, and
    refer to all other schema names explicitly when building
    up a :class:`.Table` object. The options described here are only for those users
    who can't, or prefer not to, stay within these guidelines.

Note that **in all cases**, the "default" schema is always reflected as ``None``.
The "default" schema on Postgresql is that which is returned by the
Postgresql ``current_schema()`` function. On a typical Postgresql installation,
this is the name ``public``. So a table that refers to another which is
in the ``public`` (i.e. default) schema will always have the ``.schema`` attribute
set to ``None``.

.. versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path``
   dialect-level option accepted by :class:`.Table` and :meth:`.MetaData.reflect`.


.. seealso::

    `The Schema Search Path <http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_ - on the Postgresql website.
Background on PG's ``search_path`` is at:
http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH

INSERT/UPDATE...RETURNING
-------------------------

@@ -227,44 +131,6 @@ use the :meth:`._UpdateBase.returning` method on a per-statement basis::
        where(table.c.name=='foo')
    print result.fetchall()

.. _postgresql_match:

Full Text Search
----------------

SQLAlchemy makes available the Postgresql ``@@`` operator via the
:meth:`.ColumnElement.match` method on any textual column expression.
On a Postgresql dialect, an expression like the following::

    select([sometable.c.text.match("search string")])

will emit to the database::

    SELECT text @@ to_tsquery('search string') FROM table

The Postgresql text search functions such as ``to_tsquery()``
and ``to_tsvector()`` are available
explicitly using the standard :attr:`.func` construct. For example::

    select([
        func.to_tsvector('fat cats ate rats').match('cat & rat')
    ])

Emits the equivalent of::

    SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat')

The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST::

    from sqlalchemy.dialects.postgresql import TSVECTOR
    from sqlalchemy import select, cast
    select([cast("some text", TSVECTOR)])

produces a statement equivalent to::

    SELECT CAST('some text' AS TSVECTOR) AS anon_1


FROM ONLY ...
------------------------

@@ -339,12 +205,13 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
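
A hedged sketch of the index option described above (``some_table`` is
assumed to exist; ``gin`` must be valid for the server in use)::

    from sqlalchemy import Index

    Index('ix_some_table_data', some_table.c.data, postgresql_using='gin')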

"""

from collections import defaultdict
import re

from ... import sql, schema, exc, util
from ...engine import default, reflection
from ...sql import compiler, expression, operators
from ...sql import compiler, expression, util as sql_util, operators
from ... import types as sqltypes

try:

@@ -364,7 +231,7 @@ RESERVED_WORDS = set(
    "default", "deferrable", "desc", "distinct", "do", "else", "end",
    "except", "false", "fetch", "for", "foreign", "from", "grant", "group",
    "having", "in", "initially", "intersect", "into", "leading", "limit",
    "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset",
    "localtime", "localtimestamp", "new", "not", "null", "off", "offset",
    "old", "on", "only", "or", "order", "placing", "primary", "references",
    "returning", "select", "session_user", "some", "symmetric", "table",
    "then", "to", "trailing", "true", "union", "unique", "user", "using",

@@ -484,7 +351,7 @@ class UUID(sqltypes.TypeEngine):
        if self.as_uuid:
            def process(value):
                if value is not None:
                    value = util.text_type(value)
                    value = str(value)
                return value
            return process
        else:

@@ -502,23 +369,6 @@ class UUID(sqltypes.TypeEngine):

PGUuid = UUID

class TSVECTOR(sqltypes.TypeEngine):
    """The :class:`.postgresql.TSVECTOR` type implements the Postgresql
    text search type TSVECTOR.

    It can be used to do full text queries on natural language
    documents.

    .. versionadded:: 0.9.0

    .. seealso::

        :ref:`postgresql_match`

    """
    __visit_name__ = 'TSVECTOR'



class _Slice(expression.ColumnElement):
    __visit_name__ = 'slice'

@@ -834,10 +684,6 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
        self.as_tuple = as_tuple
        self.dimensions = dimensions

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

@@ -1068,7 +914,6 @@ ischema_names = {
    'interval': INTERVAL,
    'interval year to month': INTERVAL,
    'interval day to second': INTERVAL,
    'tsvector' : TSVECTOR
}

@@ -1110,30 +955,25 @@ class PGCompiler(compiler.SQLCompiler):

    def visit_ilike_op_binary(self, binary, operator, **kw):
        escape = binary.modifiers.get("escape", None)

        return '%s ILIKE %s' % \
            (self.process(binary.left, **kw),
             self.process(binary.right, **kw)) \
            + (
                ' ESCAPE ' +
                self.render_literal_value(escape, sqltypes.STRINGTYPE)
                if escape else ''
            )
            + (escape and
               (' ESCAPE ' + self.render_literal_value(escape, None))
               or '')

    def visit_notilike_op_binary(self, binary, operator, **kw):
        escape = binary.modifiers.get("escape", None)
        return '%s NOT ILIKE %s' % \
            (self.process(binary.left, **kw),
             self.process(binary.right, **kw)) \
            + (
                ' ESCAPE ' +
                self.render_literal_value(escape, sqltypes.STRINGTYPE)
                if escape else ''
            )
            + (escape and
               (' ESCAPE ' + self.render_literal_value(escape, None))
               or '')

    def render_literal_value(self, value, type_):
        value = super(PGCompiler, self).render_literal_value(value, type_)

        # TODO: need to inspect "standard_conforming_strings"
        if self.dialect._backslash_escapes:
            value = value.replace('\\', '\\\\')
        return value

@@ -1170,25 +1010,14 @@ class PGCompiler(compiler.SQLCompiler):
        return ""

    def for_update_clause(self, select):

        if select._for_update_arg.read:
            tmp = " FOR SHARE"
        if select.for_update == 'nowait':
            return " FOR UPDATE NOWAIT"
        elif select.for_update == 'read':
            return " FOR SHARE"
        elif select.for_update == 'read_nowait':
            return " FOR SHARE NOWAIT"
        else:
            tmp = " FOR UPDATE"

        if select._for_update_arg.of:
            tables = util.OrderedSet(
                c.table if isinstance(c, expression.ColumnClause)
                else c for c in select._for_update_arg.of)
            tmp += " OF " + ", ".join(
                self.process(table, ashint=True)
                for table in tables
            )

        if select._for_update_arg.nowait:
            tmp += " NOWAIT"

        return tmp
            return super(PGCompiler, self).for_update_clause(select)

    def returning_clause(self, stmt, returning_cols):

@@ -1211,15 +1040,12 @@ class PGCompiler(compiler.SQLCompiler):

class PGDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs):

        colspec = self.preparer.format_column(column)
        impl_type = column.type.dialect_impl(self.dialect)
        if column.primary_key and \
            column is column.table._autoincrement_column and \
            not isinstance(impl_type, sqltypes.SmallInteger) and \
            (
                self.dialect.supports_smallserial or
                not isinstance(impl_type, sqltypes.SmallInteger)
            ) and (
                column.default is None or
                (
                    isinstance(column.default, schema.Sequence) and

@@ -1227,8 +1053,6 @@ class PGDDLCompiler(compiler.DDLCompiler):
                )):
            if isinstance(impl_type, sqltypes.BigInteger):
                colspec += " BIGSERIAL"
            elif isinstance(impl_type, sqltypes.SmallInteger):
                colspec += " SMALLSERIAL"
            else:
                colspec += " SERIAL"
        else:

@@ -1246,9 +1070,7 @@ class PGDDLCompiler(compiler.DDLCompiler):

        return "CREATE TYPE %s AS ENUM (%s)" % (
            self.preparer.format_type(type_),
            ", ".join(
                self.sql_compiler.process(sql.literal(e), literal_binds=True)
                for e in type_.enums)
            ",".join("'%s'" % e for e in type_.enums)
        )

    def visit_drop_enum_type(self, drop):

@@ -1271,11 +1093,11 @@ class PGDDLCompiler(compiler.DDLCompiler):
                    preparer.format_table(index.table)
                )

        using = index.dialect_options['postgresql']['using']
        if using:
            text += "USING %s " % preparer.quote(using)
        if 'postgresql_using' in index.kwargs:
            using = index.kwargs['postgresql_using']
            text += "USING %s " % preparer.quote(using, index.quote)

        ops = index.dialect_options["postgresql"]["ops"]
        ops = index.kwargs.get('postgresql_ops', {})
        text += "(%s)" \
                % (
                    ', '.join([

@@ -1288,7 +1110,10 @@ class PGDDLCompiler(compiler.DDLCompiler):
                        for expr, c in zip(index.expressions, index.columns)])
                )

        whereclause = index.dialect_options["postgresql"]["where"]
        if 'postgresql_where' in index.kwargs:
            whereclause = index.kwargs['postgresql_where']
        else:
            whereclause = None

        if whereclause is not None:
            where_compiled = self.sql_compiler.process(

@@ -1305,7 +1130,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
        elements = []
        for c in constraint.columns:
            op = constraint.operators[c.name]
            elements.append(self.preparer.quote(c.name) + ' WITH '+op)
            elements.append(self.preparer.quote(c.name, c.quote)+' WITH '+op)
        text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements))
        if constraint.where is not None:
            text += ' WHERE (%s)' % self.sql_compiler.process(

@@ -1316,9 +1141,6 @@ class PGDDLCompiler(compiler.DDLCompiler):


class PGTypeCompiler(compiler.GenericTypeCompiler):
    def visit_TSVECTOR(self, type):
        return "TSVECTOR"

    def visit_INET(self, type_):
        return "INET"

@@ -1343,9 +1165,6 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
    def visit_HSTORE(self, type_):
        return "HSTORE"

    def visit_JSON(self, type_):
        return "JSON"

    def visit_INT4RANGE(self, type_):
        return "INT4RANGE"

@@ -1434,9 +1253,9 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
        if not type_.name:
            raise exc.CompileError("Postgresql ENUM type requires a name.")

        name = self.quote(type_.name)
        name = self.quote(type_.name, type_.quote)
        if not self.omit_schema and use_schema and type_.schema is not None:
            name = self.quote_schema(type_.schema) + "." + name
            name = self.quote_schema(type_.schema, type_.quote) + "." + name
        return name


@@ -1512,7 +1331,6 @@ class PGDialect(default.DefaultDialect):

    supports_native_enum = True
    supports_native_boolean = True
    supports_smallserial = True

    supports_sequences = True
    sequences_optional = True

@@ -1534,27 +1352,12 @@ class PGDialect(default.DefaultDialect):
    inspector = PGInspector
    isolation_level = None

    construct_arguments = [
        (schema.Index, {
            "using": False,
            "where": None,
            "ops": {}
        }),
        (schema.Table, {
            "ignore_search_path": False
        })
    ]

    reflection_options = ('postgresql_ignore_search_path', )

    # TODO: need to inspect "standard_conforming_strings"
    _backslash_escapes = True

    def __init__(self, isolation_level=None, json_serializer=None,
                 json_deserializer=None, **kwargs):
    def __init__(self, isolation_level=None, **kwargs):
        default.DefaultDialect.__init__(self, **kwargs)
        self.isolation_level = isolation_level
        self._json_deserializer = json_deserializer
        self._json_serializer = json_serializer

    def initialize(self, connection):
        super(PGDialect, self).initialize(connection)
|
||||
@ -1568,14 +1371,6 @@ class PGDialect(default.DefaultDialect):
|
||||
# psycopg2, others may have placed ENUM here as well
|
||||
self.colspecs.pop(ENUM, None)
|
||||
|
||||
# http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
|
||||
self.supports_smallserial = self.server_version_info >= (9, 2)
|
||||
|
||||
self._backslash_escapes = self.server_version_info < (8, 2) or \
|
||||
connection.scalar(
|
||||
"show standard_conforming_strings"
|
||||
) == 'off'
|
||||
|
||||
def on_connect(self):
|
||||
if self.isolation_level is not None:
|
||||
def connect(conn):
|
||||
@ -1656,7 +1451,7 @@ class PGDialect(default.DefaultDialect):
                query,
                bindparams=[
                    sql.bindparam(
                        'schema', util.text_type(schema.lower()),
                        'schema', unicode(schema.lower()),
                        type_=sqltypes.Unicode)]
            )
        )
@ -1672,7 +1467,7 @@ class PGDialect(default.DefaultDialect):
                "n.oid=c.relnamespace where n.nspname=current_schema() and "
                "relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(table_name),
                    sql.bindparam('name', unicode(table_name),
                        type_=sqltypes.Unicode)]
            )
        )
@ -1684,9 +1479,9 @@ class PGDialect(default.DefaultDialect):
                "relname=:name",
                bindparams=[
                    sql.bindparam('name',
                        util.text_type(table_name), type_=sqltypes.Unicode),
                        unicode(table_name), type_=sqltypes.Unicode),
                    sql.bindparam('schema',
                        util.text_type(schema), type_=sqltypes.Unicode)]
                        unicode(schema), type_=sqltypes.Unicode)]
            )
        )
        return bool(cursor.first())
@ -1700,7 +1495,7 @@ class PGDialect(default.DefaultDialect):
                "n.nspname=current_schema() "
                "and relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(sequence_name),
                    sql.bindparam('name', unicode(sequence_name),
                        type_=sqltypes.Unicode)
                ]
            )
@ -1712,10 +1507,10 @@ class PGDialect(default.DefaultDialect):
                "n.oid=c.relnamespace where relkind='S' and "
                "n.nspname=:schema and relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(sequence_name),
                    sql.bindparam('name', unicode(sequence_name),
                        type_=sqltypes.Unicode),
                    sql.bindparam('schema',
                        util.text_type(schema), type_=sqltypes.Unicode)
                        unicode(schema), type_=sqltypes.Unicode)
                ]
            )
        )
@ -1723,6 +1518,12 @@ class PGDialect(default.DefaultDialect):
        return bool(cursor.first())

    def has_type(self, connection, type_name, schema=None):
        bindparams = [
            sql.bindparam('typname',
                unicode(type_name), type_=sqltypes.Unicode),
            sql.bindparam('nspname',
                unicode(schema), type_=sqltypes.Unicode),
        ]
        if schema is not None:
            query = """
            SELECT EXISTS (
@ -1732,7 +1533,6 @@ class PGDialect(default.DefaultDialect):
                AND n.nspname = :nspname
                )
                """
            query = sql.text(query)
        else:
            query = """
            SELECT EXISTS (
@ -1741,17 +1541,7 @@ class PGDialect(default.DefaultDialect):
                AND pg_type_is_visible(t.oid)
                )
                """
            query = sql.text(query)
        query = query.bindparams(
            sql.bindparam('typname',
                util.text_type(type_name), type_=sqltypes.Unicode),
        )
        if schema is not None:
            query = query.bindparams(
                sql.bindparam('nspname',
                    util.text_type(schema), type_=sqltypes.Unicode),
            )
        cursor = connection.execute(query)
        cursor = connection.execute(sql.text(query, bindparams=bindparams))
        return bool(cursor.scalar())

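The recurring change in these hunks swaps 0.9's ``util.text_type`` for the Py2-only builtin ``unicode``. A minimal sketch of the compatibility shim being dropped, assuming only a version check; names here are illustrative rather than the library's internals::

    import sys

    if sys.version_info[0] >= 3:
        text_type = str      # what util.text_type resolves to on Py3K
    else:
        text_type = unicode  # noqa: F821 - the Py2K builtin used above
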
    def _get_server_version_info(self, connection):
@ -1788,13 +1578,15 @@ class PGDialect(default.DefaultDialect):
        """ % schema_where_clause
        # Since we're binding to unicode, table_name and schema_name must be
        # unicode.
        table_name = util.text_type(table_name)
        table_name = unicode(table_name)
        if schema is not None:
            schema = util.text_type(schema)
        s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
        s = s.columns(oid=sqltypes.Integer)
        if schema:
            s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode))
            schema = unicode(schema)
        s = sql.text(query, bindparams=[
            sql.bindparam('table_name', type_=sqltypes.Unicode),
            sql.bindparam('schema', type_=sqltypes.Unicode)
            ],
            typemap={'oid': sqltypes.Integer}
        )
        c = connection.execute(s, table_name=table_name, schema=schema)
        table_oid = c.scalar()
        if table_oid is None:
@ -1810,13 +1602,13 @@ class PGDialect(default.DefaultDialect):
        """
        rp = connection.execute(s)
        # what about system tables?

        if util.py2k:
            schema_names = [row[0].decode(self.encoding) for row in rp \
                if not row[0].startswith('pg_')]
        else:
            schema_names = [row[0] for row in rp \
        # Py3K
        #schema_names = [row[0] for row in rp \
        #    if not row[0].startswith('pg_')]
        # Py2K
        schema_names = [row[0].decode(self.encoding) for row in rp \
            if not row[0].startswith('pg_')]
        # end Py2K
        return schema_names

    @reflection.cache
@ -1827,7 +1619,7 @@ class PGDialect(default.DefaultDialect):
            current_schema = self.default_schema_name

        result = connection.execute(
            sql.text("SELECT relname FROM pg_class c "
            sql.text(u"SELECT relname FROM pg_class c "
                "WHERE relkind = 'r' "
                "AND '%s' = (select nspname from pg_namespace n "
                "where n.oid = c.relnamespace) " %
@ -1850,12 +1642,12 @@ class PGDialect(default.DefaultDialect):
            AND '%(schema)s' = (select nspname from pg_namespace n
            where n.oid = c.relnamespace)
            """ % dict(schema=current_schema)

        if util.py2k:
            view_names = [row[0].decode(self.encoding)
        # Py3K
        #view_names = [row[0] for row in connection.execute(s)]
        # Py2K
        view_names = [row[0].decode(self.encoding)
            for row in connection.execute(s)]
        else:
            view_names = [row[0] for row in connection.execute(s)]
        # end Py2K
        return view_names

    @reflection.cache
@ -1872,10 +1664,11 @@ class PGDialect(default.DefaultDialect):
        rp = connection.execute(sql.text(s),
            view_name=view_name, schema=current_schema)
        if rp:
            if util.py2k:
                view_def = rp.scalar().decode(self.encoding)
            else:
                view_def = rp.scalar()
            # Py3K
            #view_def = rp.scalar()
            # Py2K
            view_def = rp.scalar().decode(self.encoding)
            # end Py2K
            return view_def

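The reflection methods above are normally reached through the public :class:`.Inspector` facade rather than called on the dialect directly. A sketch, assuming an existing Postgresql engine::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    insp = reflection.Inspector.from_engine(engine)
    print(insp.get_schema_names())  # backed by get_schema_names() above
    print(insp.get_view_names())    # backed by get_view_names() above
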
    @reflection.cache
@ -2075,8 +1868,7 @@ class PGDialect(default.DefaultDialect):
        return {'constrained_columns': cols, 'name': name}

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None,
                         postgresql_ignore_search_path=False, **kw):
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        preparer = self.identifier_preparer
        table_oid = self.get_table_oid(connection, table_name, schema,
                                       info_cache=kw.get('info_cache'))
@ -2095,15 +1887,6 @@ class PGDialect(default.DefaultDialect):
              n.oid = c.relnamespace
          ORDER BY 1
        """
        # http://www.postgresql.org/docs/9.0/static/sql-createtable.html
        FK_REGEX = re.compile(
            r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
            r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
            r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
            r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
            r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
            r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
        )

        t = sql.text(FK_SQL, typemap={
            'conname': sqltypes.Unicode,
@ -2111,36 +1894,24 @@ class PGDialect(default.DefaultDialect):
        c = connection.execute(t, table=table_oid)
        fkeys = []
        for conname, condef, conschema in c.fetchall():
            m = re.search(FK_REGEX, condef).groups()

            m = re.search('FOREIGN KEY \((.*?)\) REFERENCES '
                          '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
            constrained_columns, referred_schema, \
                referred_table, referred_columns, \
                _, match, _, onupdate, _, ondelete, \
                deferrable, _, initially = m

            if deferrable is not None:
                deferrable = True if deferrable == 'DEFERRABLE' else False
                referred_table, referred_columns = m
            constrained_columns = [preparer._unquote_identifier(x)
                for x in re.split(r'\s*,\s*', constrained_columns)]

            if postgresql_ignore_search_path:
                # when ignoring search path, we use the actual schema
                # provided it isn't the "default" schema
                if conschema != self.default_schema_name:
                    referred_schema = conschema
                else:
                    referred_schema = schema
            elif referred_schema:
                # referred_schema is the schema that we regexp'ed from
                # pg_get_constraintdef(). If the schema is in the search
                # path, pg_get_constraintdef() will give us None.
                referred_schema = \
            if referred_schema:
                referred_schema =\
                    preparer._unquote_identifier(referred_schema)
            elif schema is not None and schema == conschema:
                # If the actual schema matches the schema of the table
                # we're reflecting, then we will use that.
                # no schema was returned by pg_get_constraintdef(). This
                # means the schema is in the search path. We will leave
                # it as None, unless the actual schema, which we pull out
                # from pg_namespace even though pg_get_constraintdef() doesn't
                # want to give it to us, matches that of the referencing table,
                # and an explicit schema was given for the referencing table.
                referred_schema = schema

            referred_table = preparer._unquote_identifier(referred_table)
            referred_columns = [preparer._unquote_identifier(x)
                for x in re.split(r'\s*,\s', referred_columns)]
@ -2149,14 +1920,7 @@ class PGDialect(default.DefaultDialect):
                'constrained_columns': constrained_columns,
                'referred_schema': referred_schema,
                'referred_table': referred_table,
                'referred_columns': referred_columns,
                'options': {
                    'onupdate': onupdate,
                    'ondelete': ondelete,
                    'deferrable': deferrable,
                    'initially': initially,
                    'match': match
                }
                'referred_columns': referred_columns
            }
            fkeys.append(fkey_d)
        return fkeys
@ -2257,31 +2021,25 @@ class PGDialect(default.DefaultDialect):
        UNIQUE_SQL = """
            SELECT
                cons.conname as name,
                cons.conkey as key,
                a.attnum as col_num,
                a.attname as col_name
                ARRAY_AGG(a.attname) as column_names
            FROM
                pg_catalog.pg_constraint cons
                join pg_attribute a
                    on cons.conrelid = a.attrelid AND a.attnum = ANY(cons.conkey)
                left outer join pg_attribute a
                    on cons.conrelid = a.attrelid and a.attnum = ANY(cons.conkey)
            WHERE
                cons.conrelid = :table_oid AND
                cons.contype = 'u'
            GROUP BY
                cons.conname
        """

        t = sql.text(UNIQUE_SQL, typemap={'col_name': sqltypes.Unicode})
        t = sql.text(UNIQUE_SQL,
                     typemap={'column_names': ARRAY(sqltypes.Unicode)})
        c = connection.execute(t, table_oid=table_oid)

        uniques = defaultdict(lambda: defaultdict(dict))
        for row in c.fetchall():
            uc = uniques[row.name]
            uc["key"] = row.key
            uc["cols"][row.col_num] = row.col_name

        return [
            {'name': name,
             'column_names': [uc["cols"][i] for i in uc["key"]]}
            for name, uc in uniques.items()
            {'name': row.name, 'column_names': row.column_names}
            for row in c.fetchall()
        ]

    def _load_enums(self, connection):
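The 0.9 side of the foreign-key hunk returns an extra ``options`` dictionary per constraint; the 0.8 side being restored does not. A sketch of the difference in the reflected structure (engine URL and table name are illustrative)::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    insp = reflection.Inspector.from_engine(engine)
    for fk in insp.get_foreign_keys('invoice'):
        # keys present in both versions:
        print(fk['constrained_columns'], fk['referred_table'],
              fk['referred_columns'])
        # 0.9 only, absent after this downgrade:
        # fk['options'] -> {'onupdate': ..., 'ondelete': ...,
        #                   'deferrable': ..., 'initially': ..., 'match': ...}
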
@ -47,10 +47,10 @@ class ExcludeConstraint(ColumnCollectionConstraint):
        """
        ColumnCollectionConstraint.__init__(
            self,
            *[col for col, op in elements],
            name=kw.get('name'),
            deferrable=kw.get('deferrable'),
            initially=kw.get('initially')
            initially=kw.get('initially'),
            *[col for col, op in elements]
        )
        self.operators = {}
        for col_or_string, op in elements:
@ -64,10 +64,10 @@ class ExcludeConstraint(ColumnCollectionConstraint):
    def copy(self, **kw):
        elements = [(col, self.operators[col])
                    for col in self.columns.keys()]
        c = self.__class__(*elements,
                           name=self.name,
        c = self.__class__(name=self.name,
                           deferrable=self.deferrable,
                           initially=self.initially)
                           initially=self.initially,
                           *elements)
        c.dispatch._update(self.dispatch)
        return c

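These two hunks only reorder how the column elements are passed to the superclass; usage of :class:`.ExcludeConstraint` is unchanged. A sketch of a typical exclusion constraint, assuming a server with the btree_gist extension installed::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

    metadata = MetaData()
    room = Column('room', Integer)
    during = Column('during', TSRANGE)
    booking = Table(
        'room_booking', metadata, room, during,
        # renders: EXCLUDE USING gist (room WITH =, during WITH &&)
        ExcludeConstraint((room, '='), (during, '&&')))
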
@ -52,7 +52,7 @@ def _parse_error(hstore_str, pos):


def _parse_hstore(hstore_str):
    """Parse an hstore from its literal string representation.
    """Parse an hstore from it's literal string representation.

    Attempts to approximate PG's hstore input parsing rules as closely as
    possible. Although currently this is not strictly necessary, since the
@ -97,14 +97,14 @@ def _serialize_hstore(val):
    def esc(s, position):
        if position == 'value' and s is None:
            return 'NULL'
        elif isinstance(s, util.string_types):
        elif isinstance(s, basestring):
            return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"')
        else:
            raise ValueError("%r in %s position is not a string." %
                             (s, position))

    return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
                     for k, v in val.items())
                     for k, v in val.iteritems())


class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):

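Both sides of ``_serialize_hstore`` produce the same literal form; only the string-type check and the dict iteration are downgraded to Py2K spellings. For illustration, a simplified standalone sketch of the escaping rule (it omits the ValueError path of the original)::

    def serialize_hstore(val):
        """Render {'k': 'v'} as '"k"=>"v"', with NULL for None values."""
        def esc(s):
            if s is None:
                return 'NULL'
            return '"%s"' % s.replace("\\", "\\\\").replace('"', '\\"')
        return ', '.join('%s=>%s' % (esc(k), esc(v))
                         for k, v in val.items())

    assert serialize_hstore({'key': 'value'}) == '"key"=>"value"'
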
@ -1,199 +0,0 @@
# postgresql/json.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import

import json

from .base import ischema_names
from ... import types as sqltypes
from ...sql.operators import custom_op
from ... import sql
from ...sql import elements
from ... import util

__all__ = ('JSON', 'JSONElement')


class JSONElement(elements.BinaryExpression):
    """Represents accessing an element of a :class:`.JSON` value.

    The :class:`.JSONElement` is produced whenever using the Python index
    operator on an expression that has the type :class:`.JSON`::

        expr = mytable.c.json_data['some_key']

    The expression typically compiles to a JSON access such as ``col -> key``.
    Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast`
    and :attr:`.JSONElement.astext`.

    """
    def __init__(self, left, right, astext=False, opstring=None, result_type=None):
        self._astext = astext
        if opstring is None:
            if hasattr(right, '__iter__') and \
                    not isinstance(right, util.string_types):
                opstring = "#>"
                right = "{%s}" % (", ".join(util.text_type(elem) for elem in right))
            else:
                opstring = "->"

        self._json_opstring = opstring
        operator = custom_op(opstring, precedence=5)
        right = left._check_literal(left, operator, right)
        super(JSONElement, self).__init__(left, right, operator, type_=result_type)

    @property
    def astext(self):
        """Convert this :class:`.JSONElement` to use the 'astext' operator
        when evaluated.

        E.g.::

            select([data_table.c.data['some key'].astext])

        .. seealso::

            :meth:`.JSONElement.cast`

        """
        if self._astext:
            return self
        else:
            return JSONElement(
                self.left,
                self.right,
                astext=True,
                opstring=self._json_opstring + ">",
                result_type=sqltypes.String(convert_unicode=True)
            )

    def cast(self, type_):
        """Convert this :class:`.JSONElement` to apply both the 'astext' operator
        as well as an explicit type cast when evaulated.

        E.g.::

            select([data_table.c.data['some key'].cast(Integer)])

        .. seealso::

            :attr:`.JSONElement.astext`

        """
        if not self._astext:
            return self.astext.cast(type_)
        else:
            return sql.cast(self, type_)


class JSON(sqltypes.TypeEngine):
    """Represent the Postgresql JSON type.

    The :class:`.JSON` type stores arbitrary JSON format data, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', JSON)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data = {"key1": "value1", "key2": "value2"}
            )

    :class:`.JSON` provides several operations:

    * Index operations::

        data_table.c.data['some key']

    * Index operations returning text (required for text comparison)::

        data_table.c.data['some key'].astext == 'some value'

    * Index operations with a built-in CAST call::

        data_table.c.data['some key'].cast(Integer) == 5

    * Path index operations::

        data_table.c.data[('key_1', 'key_2', ..., 'key_n')]

    * Path index operations returning text (required for text comparison)::

        data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'

    Index operations return an instance of :class:`.JSONElement`, which represents
    an expression such as ``column -> index``. This element then defines
    methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
    for setting up type behavior.

    The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
    in-place mutations to the structure. In order to detect these, the
    :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
    allow "in-place" changes to the datastructure to produce events which
    will be detected by the unit of work. See the example at :class:`.HSTORE`
    for a simple example involving a dictionary.

    Custom serializers and deserializers are specified at the dialect level,
    that is using :func:`.create_engine`. The reason for this is that when
    using psycopg2, the DBAPI only allows serializers at the per-cursor
    or per-connection level. E.g.::

        engine = create_engine("postgresql://scott:tiger@localhost/test",
                               json_serializer=my_serialize_fn,
                               json_deserializer=my_deserialize_fn
                               )

    When using the psycopg2 dialect, the json_deserializer is registered
    against the database using ``psycopg2.extras.register_default_json``.

    .. versionadded:: 0.9

    """

    __visit_name__ = 'JSON'

    class comparator_factory(sqltypes.Concatenable.Comparator):
        """Define comparison operations for :class:`.JSON`."""

        def __getitem__(self, other):
            """Get the value at a given key."""

            return JSONElement(self.expr, other)

        def _adapt_expression(self, op, other_comparator):
            if isinstance(op, custom_op):
                if op.opstring == '->':
                    return op, sqltypes.Text
            return sqltypes.Concatenable.Comparator.\
                _adapt_expression(self, op, other_comparator)

    def bind_processor(self, dialect):
        json_serializer = dialect._json_serializer or json.dumps
        if util.py2k:
            encoding = dialect.encoding
            def process(value):
                return json_serializer(value).encode(encoding)
        else:
            def process(value):
                return json_serializer(value)
        return process

    def result_processor(self, dialect, coltype):
        json_deserializer = dialect._json_deserializer or json.loads
        if util.py2k:
            encoding = dialect.encoding
            def process(value):
                return json_deserializer(value.decode(encoding))
        else:
            def process(value):
                return json_deserializer(value)
        return process


ischema_names['json'] = JSON
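For context, the querying idioms the deleted module provided, combined from its docstring fragments into one sketch (connection details are illustrative; this requires the 0.9 series being removed here)::

    from sqlalchemy import (Table, Column, Integer, MetaData, select)
    from sqlalchemy.dialects.postgresql import JSON

    metadata = MetaData()
    data_table = Table('data_table', metadata,
                       Column('id', Integer, primary_key=True),
                       Column('data', JSON))

    # '->' access, then text comparison via the astext ('->>') form
    expr = data_table.c.data['some key'].astext == 'some value'
    # astext plus CAST, for numeric comparison
    expr2 = data_table.c.data['some key'].cast(Integer) == 5
    stmt = select([data_table]).where(expr & expr2)
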
@ -39,9 +39,7 @@ class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
                return processors.to_decimal_processor_factory(decimal.Decimal)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
@ -102,7 +102,7 @@ This overrides the encoding specified in the Postgresql client configuration.
:func:`.create_engine`.

SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
``UNICODE`` extension and to instead utilize it's own unicode encode/decode
services, which are normally reserved only for those DBAPIs that don't
fully support unicode directly. Passing ``use_native_unicode=False`` to
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
@ -169,7 +169,7 @@ connection, a sequence like the following is performed:
   If this function returns a list of HSTORE identifiers, we then determine that
   the ``HSTORE`` extension is present.

2. If the ``use_native_hstore`` flag is at its default of ``True``, and
2. If the ``use_native_hstore`` flag is at it's default of ``True``, and
   we've detected that ``HSTORE`` oids are available, the
   ``psycopg2.extensions.register_hstore()`` extension is invoked for all
   connections.
@ -191,7 +191,6 @@ may be more performant.

"""
from __future__ import absolute_import

import re
import logging

@ -206,7 +205,6 @@ from .base import PGDialect, PGCompiler, \
    ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
    _INT_TYPES
from .hstore import HSTORE
from .json import JSON


logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@ -219,9 +217,7 @@ class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
                return processors.to_decimal_processor_factory(decimal.Decimal)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
@ -240,13 +236,25 @@ class _PGNumeric(sqltypes.Numeric):


class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype):
        if util.py2k and self.convert_unicode is True:
            # we can't easily use PG's extensions here because
            # the OID is on the fly, and we need to give it a python
            # function anyway - not really worth it.
            self.convert_unicode = "force_nocheck"
        return super(_PGEnum, self).result_processor(dialect, coltype)
    def __init__(self, *arg, **kw):
        super(_PGEnum, self).__init__(*arg, **kw)
        # Py2K
        if self.convert_unicode:
            self.convert_unicode = "force"
        # end Py2K


class _PGArray(ARRAY):
    def __init__(self, *arg, **kw):
        super(_PGArray, self).__init__(*arg, **kw)
        # Py2K
        # FIXME: this check won't work for setups that
        # have convert_unicode only on their create_engine().
        if isinstance(self.item_type, sqltypes.String) and \
                self.item_type.convert_unicode:
            self.item_type.convert_unicode = "force"
        # end Py2K


class _PGHStore(HSTORE):
    def bind_processor(self, dialect):
@ -261,15 +269,6 @@ class _PGHStore(HSTORE):
        else:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_json:
            return None
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)

# When we're handed literal SQL, ensure it's a SELECT-query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
@ -343,9 +342,9 @@ class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):

class PGDialect_psycopg2(PGDialect):
    driver = 'psycopg2'
    if util.py2k:
        supports_unicode_statements = False

    # Py2K
    supports_unicode_statements = False
    # end Py2K
    default_paramstyle = 'pyformat'
    supports_sane_multi_rowcount = False  # set to true based on psycopg2 version
    execution_ctx_cls = PGExecutionContext_psycopg2
@ -354,7 +353,6 @@ class PGDialect_psycopg2(PGDialect):
    psycopg2_version = (0, 0)

    _has_native_hstore = False
    _has_native_json = False

    colspecs = util.update_copy(
        PGDialect.colspecs,
@ -362,8 +360,8 @@ class PGDialect_psycopg2(PGDialect):
            sqltypes.Numeric: _PGNumeric,
            ENUM: _PGEnum,  # needs force_unicode
            sqltypes.Enum: _PGEnum,  # needs force_unicode
            ARRAY: _PGArray,  # needs force_unicode
            HSTORE: _PGHStore,
            JSON: _PGJSON
        }
    )

@ -391,7 +389,6 @@ class PGDialect_psycopg2(PGDialect):
        self._has_native_hstore = self.use_native_hstore and \
            self._hstore_oids(connection.connection) \
            is not None
        self._has_native_json = self.psycopg2_version >= (2, 5)

        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
        self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9)
@ -403,7 +400,7 @@ class PGDialect_psycopg2(PGDialect):

    @util.memoized_property
    def _isolation_lookup(self):
        from psycopg2 import extensions
        extensions = __import__('psycopg2.extensions').extensions
        return {
            'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
            'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@ -441,7 +438,6 @@ class PGDialect_psycopg2(PGDialect):
        if self.dbapi and self.use_native_unicode:
            def on_connect(conn):
                extensions.register_type(extensions.UNICODE, conn)
                extensions.register_type(extensions.UNICODEARRAY, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_hstore:
@ -458,11 +454,6 @@ class PGDialect_psycopg2(PGDialect):
                        array_oid=array_oid)
            fns.append(on_connect)

        if self.dbapi and self._json_deserializer:
            def on_connect(conn):
                extras.register_default_json(conn, loads=self._json_deserializer)
            fns.append(on_connect)

        if fns:
            def on_connect(conn):
                for fn in fns:
@ -505,9 +496,7 @@ class PGDialect_psycopg2(PGDialect):
                'cursor already closed',
                # not sure where this path is originally from, it may
                # be obsolete. It really says "losed", not "closed".
                'losed the connection unexpectedly',
                # this can occur in newer SSL
                'connection has been closed unexpectedly'
                'losed the connection unexpectedly'
            ]:
                idx = str_e.find(msg)
                if idx >= 0 and '"' not in str_e[:idx]:
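The psycopg2-specific switches discussed in the docstring above are all passed through :func:`.create_engine`. A sketch with the flags spelled out (values shown are the documented defaults; the URL is illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        use_native_unicode=True,  # register the psycopg2 UNICODE extension
        use_native_hstore=True,   # register_hstore() when HSTORE oids found
        client_encoding='utf8')   # overrides the client configuration
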
@ -9,11 +9,13 @@ from sqlalchemy.dialects.sqlite import base, pysqlite
# default dialect
base.dialect = pysqlite.dialect

from sqlalchemy.dialects.sqlite.base import (
    BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL,
    NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect,
)

__all__ = ('BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL',
           'FLOAT', 'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME',
           'TIMESTAMP', 'VARCHAR', 'REAL', 'dialect')
from sqlalchemy.dialects.sqlite.base import \
    BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL,\
    NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect

__all__ = (
    'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'FLOAT',
    'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME', 'TIMESTAMP', 'VARCHAR',
    'REAL', 'dialect'
)
@ -12,15 +12,15 @@
Date and Time Types
-------------------

SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does
not provide out of the box functionality for translating values between Python
`datetime` objects and a SQLite-supported format. SQLAlchemy's own
SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite
does not provide out of the box functionality for translating values between
Python `datetime` objects and a SQLite-supported format. SQLAlchemy's own
:class:`~sqlalchemy.types.DateTime` and related types provide date formatting
and parsing functionality when SQlite is used. The implementation classes are
:class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`.
These types represent dates and times as ISO formatted strings, which also
nicely support ordering. There's no reliance on typical "libc" internals for
these functions so historical dates are fully supported.
nicely support ordering. There's no reliance on typical "libc" internals
for these functions so historical dates are fully supported.

Auto Incrementing Behavior
--------------------------
@ -30,15 +30,15 @@ Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html
Two things to note:

* The AUTOINCREMENT keyword is **not** required for SQLite tables to
  generate primary key values automatically. AUTOINCREMENT only means that the
  algorithm used to generate ROWID values should be slightly different.
  generate primary key values automatically. AUTOINCREMENT only means that
  the algorithm used to generate ROWID values should be slightly different.
* SQLite does **not** generate primary key (i.e. ROWID) values, even for
  one column, if the table has a composite (i.e. multi-column) primary key.
  This is regardless of the AUTOINCREMENT keyword being present or not.

To specifically render the AUTOINCREMENT keyword on the primary key column when
rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table
construct::
To specifically render the AUTOINCREMENT keyword on the primary key
column when rendering DDL, add the flag ``sqlite_autoincrement=True``
to the Table construct::

    Table('sometable', metadata,
            Column('id', Integer, primary_key=True),
@ -47,46 +47,47 @@ construct::
Transaction Isolation Level
---------------------------

:func:`.create_engine` accepts an ``isolation_level`` parameter which results
in the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
connection. Valid values for this parameter are ``SERIALIZABLE`` and ``READ
UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the
section :ref:`pysqlite_serializable` for an important workaround when using
serializable isolation with Pysqlite.
:func:`.create_engine` accepts an ``isolation_level`` parameter which
results in the command ``PRAGMA read_uncommitted <level>`` being invoked for
every new connection. Valid values for this parameter are ``SERIALIZABLE``
and ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
See the section :ref:`pysqlite_serializable` for an important workaround
when using serializable isolation with Pysqlite.

Database Locking Behavior / Concurrency
---------------------------------------

Note that SQLite is not designed for a high level of concurrency. The database
itself, being a file, is locked completely during write operations and within
transactions, meaning exactly one connection has exclusive access to the
database during this period - all other connections will be blocked during this
time.
Note that SQLite is not designed for a high level of concurrency. The
database itself, being a file, is locked completely during write operations
and within transactions, meaning exactly one connection has exclusive access
to the database during this period - all other connections will be blocked
during this time.

The Python DBAPI specification also calls for a connection model that is always
in a transaction; there is no BEGIN method, only commit and rollback. This
in a transaction; there is no BEGIN method, only commit and rollback.  This
implies that a SQLite DBAPI driver would technically allow only serialized
access to a particular database file at all times. The pysqlite driver attempts
to ameliorate this by deferring the actual BEGIN statement until the first DML
(INSERT, UPDATE, or DELETE) is received within a transaction. While this breaks
serializable isolation, it at least delays the exclusive locking inherent in
SQLite's design.
access to a particular database file at all times.   The pysqlite driver
attempts to ameliorate this by deferring the actual BEGIN statement until
the first DML (INSERT, UPDATE, or DELETE) is received within a
transaction. While this breaks serializable isolation, it at least delays
the exclusive locking inherent in SQLite's design.

SQLAlchemy's default mode of usage with the ORM is known as "autocommit=False",
which means the moment the :class:`.Session` begins to be used, a transaction
is begun. As the :class:`.Session` is used, the autoflush feature, also on by
default, will flush out pending changes to the database before each query. The
effect of this is that a :class:`.Session` used in its default mode will often
emit DML early on, long before the transaction is actually committed. This
again will have the effect of serializing access to the SQLite database. If
highly concurrent reads are desired against the SQLite database, it is advised
that the autoflush feature be disabled, and potentially even that autocommit be
re-enabled, which has the effect of each SQL statement and flush committing
changes immediately.
SQLAlchemy's default mode of usage with the ORM is known
as "autocommit=False", which means the moment the :class:`.Session` begins to
be used, a transaction is begun. As the :class:`.Session` is used, the
autoflush feature, also on by default, will flush out pending changes to the
database before each query. The effect of this is that a :class:`.Session`
used in its default mode will often emit DML early on, long before the
transaction is actually committed. This again will have the effect of
serializing access to the SQLite database. If highly concurrent reads are
desired against the SQLite database, it is advised that the autoflush feature
be disabled, and potentially even that autocommit be re-enabled, which has
the effect of each SQL statement and flush committing changes immediately.

For more information on SQLite's lack of concurrency by design, please see
`Situations Where Another RDBMS May Work Better - High Concurrency
<http://www.sqlite.org/whentouse.html>`_ near the bottom of the page.
For more information on SQLite's lack of concurrency by design, please
see `Situations Where Another RDBMS May Work Better - High
Concurrency <http://www.sqlite.org/whentouse.html>`_ near the bottom of
the page.

.. _sqlite_foreign_keys:

@ -94,19 +95,19 @@ Foreign Key Support
-------------------

SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables,
however by default these constraints have no effect on the operation of the
table.
however by default these constraints have no effect on the operation
of the table.

Constraint checking on SQLite has three prerequisites:

* At least version 3.6.19 of SQLite must be in use
* The SQLite library must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY
* The SQLite libary must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY
  or SQLITE_OMIT_TRIGGER symbols enabled.
* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all connections
  before use.

SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically for
new connections through the usage of events::
SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically
for new connections through the usage of events::

    from sqlalchemy.engine import Engine
    from sqlalchemy import event
@ -119,75 +120,26 @@ new connections through the usage of events::

.. seealso::

    `SQLite Foreign Key Support <http://www.sqlite.org/foreignkeys.html>`_ - on
    the SQLite web site.
    `SQLite Foreign Key Support <http://www.sqlite.org/foreignkeys.html>`_ -
    on the SQLite web site.

    :ref:`event_toplevel` - SQLAlchemy event API.

.. _sqlite_type_reflection:

Type Reflection
---------------

SQLite types are unlike those of most other database backends, in that
the string name of the type usually does not correspond to a "type" in a
one-to-one fashion. Instead, SQLite links per-column typing behavior
to one of five so-called "type affinities" based on a string matching
pattern for the type.

SQLAlchemy's reflection process, when inspecting types, uses a simple
lookup table to link the keywords returned to provided SQLAlchemy types.
This lookup table is present within the SQLite dialect as it is for all
other dialects. However, the SQLite dialect has a different "fallback"
routine for when a particular type name is not located in the lookup map;
it instead implements the SQLite "type affinity" scheme located at
http://www.sqlite.org/datatype3.html section 2.1.

The provided typemap will make direct associations from an exact string
name match for the following types:

:class:`~.types.BIGINT`, :class:`~.types.BLOB`,
:class:`~.types.BOOLEAN`, :class:`~.types.BOOLEAN`,
:class:`~.types.CHAR`, :class:`~.types.DATE`,
:class:`~.types.DATETIME`, :class:`~.types.FLOAT`,
:class:`~.types.DECIMAL`, :class:`~.types.FLOAT`,
:class:`~.types.INTEGER`, :class:`~.types.INTEGER`,
:class:`~.types.NUMERIC`, :class:`~.types.REAL`,
:class:`~.types.SMALLINT`, :class:`~.types.TEXT`,
:class:`~.types.TIME`, :class:`~.types.TIMESTAMP`,
:class:`~.types.VARCHAR`, :class:`~.types.NVARCHAR`,
:class:`~.types.NCHAR`

When a type name does not match one of the above types, the "type affinity"
lookup is used instead:

* :class:`~.types.INTEGER` is returned if the type name includes the
  string ``INT``
* :class:`~.types.TEXT` is returned if the type name includes the
  string ``CHAR``, ``CLOB`` or ``TEXT``
* :class:`~.types.NullType` is returned if the type name includes the
  string ``BLOB``
* :class:`~.types.REAL` is returned if the type name includes the string
  ``REAL``, ``FLOA`` or ``DOUB``.
* Otherwise, the :class:`~.types.NUMERIC` type is used.

.. versionadded:: 0.9.3 Support for SQLite type affinity rules when reflecting
   columns.

"""
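The foreign-key docs above show the import lines of the event recipe, but the hunk cuts off before the listener body; the same recipe completed, as a sketch (database path is illustrative)::

    from sqlalchemy import create_engine, event
    from sqlalchemy.engine import Engine

    @event.listens_for(Engine, "connect")
    def set_sqlite_pragma(dbapi_connection, connection_record):
        # enforce foreign key constraints on every new pysqlite connection
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()

    engine = create_engine("sqlite:///example.db")
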

import datetime
import re

from ... import processors
from ... import sql, exc
from ... import types as sqltypes, schema as sa_schema
from ... import util
from ...engine import default, reflection
from ...sql import compiler
from sqlalchemy import sql, exc
from sqlalchemy.engine import default, base, reflection
from sqlalchemy import types as sqltypes
from sqlalchemy import util
from sqlalchemy.sql import compiler
from sqlalchemy import processors

from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT, INTEGER, REAL,
                      NUMERIC, SMALLINT, TEXT, TIMESTAMP, VARCHAR)
from sqlalchemy.types import BIGINT, BLOB, BOOLEAN, CHAR,\
    DECIMAL, FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT,\
    TIMESTAMP, VARCHAR


class _DateTimeMixin(object):
@ -202,19 +154,11 @@ class _DateTimeMixin(object):
        self._storage_format = storage_format

    def adapt(self, cls, **kw):
        if issubclass(cls, _DateTimeMixin):
            if self._storage_format:
                kw["storage_format"] = self._storage_format
            if self._reg:
                kw["regexp"] = self._reg
        return super(_DateTimeMixin, self).adapt(cls, **kw)

    def literal_processor(self, dialect):
        bp = self.bind_processor(dialect)
        def process(value):
            return "'%s'" % bp(value)
        return process

        if self._storage_format:
            kw["storage_format"] = self._storage_format
        if self._reg:
            kw["regexp"] = self._reg
        return util.constructor_copy(self, cls, **kw)

class DATETIME(_DateTimeMixin, sqltypes.DateTime):
    """Represent a Python datetime object in SQLite using a string.
@ -238,14 +182,14 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
            regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)"
        )

    :param storage_format: format string which will be applied to the dict with
     keys year, month, day, hour, minute, second, and microsecond.
    :param storage_format: format string which will be applied to the
     dict with keys year, month, day, hour, minute, second, and microsecond.

    :param regexp: regular expression which will be applied to incoming result
     rows. If the regexp contains named groups, the resulting match dict is
     applied to the Python datetime() constructor as keyword arguments.
     Otherwise, if positional groups are used, the datetime() constructor
     is called with positional arguments via
    :param regexp: regular expression which will be applied to
     incoming result rows. If the regexp contains named groups, the
     resulting match dict is applied to the Python datetime() constructor
     as keyword arguments. Otherwise, if positional groups are used, the
     the datetime() constructor is called with positional arguments via
     ``*map(int, match_obj.groups(0))``.
    """

@ -337,7 +281,7 @@ class DATE(_DateTimeMixin, sqltypes.Date):
     incoming result rows. If the regexp contains named groups, the
     resulting match dict is applied to the Python date() constructor
     as keyword arguments. Otherwise, if positional groups are used, the
     date() constructor is called with positional arguments via
     the date() constructor is called with positional arguments via
     ``*map(int, match_obj.groups(0))``.
    """

@ -391,14 +335,15 @@ class TIME(_DateTimeMixin, sqltypes.Time):
            regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
        )

    :param storage_format: format string which will be applied to the dict with
     keys hour, minute, second, and microsecond.
    :param storage_format: format string which will be applied to the
     dict with keys hour, minute, second, and microsecond.

    :param regexp: regular expression which will be applied to incoming result
     rows. If the regexp contains named groups, the resulting match dict is
     applied to the Python time() constructor as keyword arguments. Otherwise,
     if positional groups are used, the time() constructor is called with
     positional arguments via ``*map(int, match_obj.groups(0))``.
    :param regexp: regular expression which will be applied to
     incoming result rows. If the regexp contains named groups, the
     resulting match dict is applied to the Python time() constructor
     as keyword arguments. Otherwise, if positional groups are used, the
     the time() constructor is called with positional arguments via
     ``*map(int, match_obj.groups(0))``.
    """

    _storage_format = "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d"
@ -453,7 +398,6 @@ ischema_names = {
    'CHAR': sqltypes.CHAR,
    'DATE': sqltypes.DATE,
    'DATETIME': sqltypes.DATETIME,
    'DOUBLE': sqltypes.FLOAT,
    'DECIMAL': sqltypes.DECIMAL,
    'FLOAT': sqltypes.FLOAT,
    'INT': sqltypes.INTEGER,
@ -474,17 +418,17 @@ class SQLiteCompiler(compiler.SQLCompiler):
    extract_map = util.update_copy(
        compiler.SQLCompiler.extract_map,
        {
            'month': '%m',
            'day': '%d',
            'year': '%Y',
            'second': '%S',
            'hour': '%H',
            'doy': '%j',
            'minute': '%M',
            'epoch': '%s',
            'dow': '%w',
            'week': '%W',
        })
        'month': '%m',
        'day': '%d',
        'year': '%Y',
        'second': '%S',
        'hour': '%H',
        'doy': '%j',
        'minute': '%M',
        'epoch': '%s',
        'dow': '%w',
        'week': '%W'
    })

    def visit_now_func(self, fn, **kw):
        return "CURRENT_TIMESTAMP"
@ -503,9 +447,9 @@ class SQLiteCompiler(compiler.SQLCompiler):

    def visit_cast(self, cast, **kwargs):
        if self.dialect.supports_cast:
            return super(SQLiteCompiler, self).visit_cast(cast, **kwargs)
            return super(SQLiteCompiler, self).visit_cast(cast)
        else:
            return self.process(cast.clause, **kwargs)
            return self.process(cast.clause)

    def visit_extract(self, extract, **kw):
        try:
@ -547,11 +491,11 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
            colspec += " NOT NULL"

        if (column.primary_key and
                column.table.dialect_options['sqlite']['autoincrement'] and
                len(column.table.primary_key.columns) == 1 and
                issubclass(column.type._type_affinity, sqltypes.Integer) and
                not column.foreign_keys):
            colspec += " PRIMARY KEY AUTOINCREMENT"
                column.table.kwargs.get('sqlite_autoincrement', False) and
                len(column.table.primary_key.columns) == 1 and
                issubclass(column.type._type_affinity, sqltypes.Integer) and
                not column.foreign_keys):
            colspec += " PRIMARY KEY AUTOINCREMENT"

        return colspec

@ -561,25 +505,24 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
        # with the column itself.
        if len(constraint.columns) == 1:
            c = list(constraint)[0]
            if (c.primary_key and
                    c.table.dialect_options['sqlite']['autoincrement'] and
                    issubclass(c.type._type_affinity, sqltypes.Integer) and
                    not c.foreign_keys):
            if c.primary_key and \
                    c.table.kwargs.get('sqlite_autoincrement', False) and \
                    issubclass(c.type._type_affinity, sqltypes.Integer) and \
                    not c.foreign_keys:
                return None

        return super(SQLiteDDLCompiler, self).visit_primary_key_constraint(
            constraint)
        return super(SQLiteDDLCompiler, self).\
            visit_primary_key_constraint(constraint)

    def visit_foreign_key_constraint(self, constraint):

        local_table = list(constraint._elements.values())[0].parent.table
        local_table = constraint._elements.values()[0].parent.table
        remote_table = list(constraint._elements.values())[0].column.table

        if local_table.schema != remote_table.schema:
            return None
        else:
            return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint(
                constraint)
            return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint(constraint)

    def define_constraint_remote_table(self, constraint, table, preparer):
        """Format the remote table clause of a CREATE CONSTRAINT clause."""
@ -587,8 +530,8 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
        return preparer.format_table(table, use_schema=False)

    def visit_create_index(self, create):
        return super(SQLiteDDLCompiler, self).visit_create_index(
            create, include_table_schema=False)
        return super(SQLiteDDLCompiler, self).\
            visit_create_index(create, include_table_schema=False)


class SQLiteTypeCompiler(compiler.GenericTypeCompiler):
@ -625,10 +568,10 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
        name = index.name
        result = self.quote(name, index.quote)
        if (not self.omit_schema and
                use_schema and
                getattr(index.table, "schema", None)):
            result = self.quote_schema(index.table.schema,
                                       index.table.quote_schema) + "." + result
            use_schema and
            getattr(index.table, "schema", None)):
            result = self.quote_schema(
                index.table.schema, index.table.quote_schema) + "." + result
        return result


@ -638,9 +581,11 @@ class SQLiteExecutionContext(default.DefaultExecutionContext):
        return self.execution_options.get("sqlite_raw_colnames", False)

    def _translate_colname(self, colname):
        # adjust for dotted column names. SQLite in the case of UNION may store
        # col names as "tablename.colname" in cursor.description
        if not self._preserve_raw_colnames and "." in colname:
        # adjust for dotted column names.  SQLite
        # in the case of UNION may store col names as
        # "tablename.colname"
        # in cursor.description
        if not self._preserve_raw_colnames and "." in colname:
            return colname.split(".")[1], colname
        else:
            return colname, None
@ -655,7 +600,6 @@ class SQLiteDialect(default.DefaultDialect):
    supports_empty_insert = False
    supports_cast = True
    supports_multivalues_insert = True
    supports_right_nested_joins = False

    default_paramstyle = 'qmark'
    execution_ctx_cls = SQLiteExecutionContext
@ -670,39 +614,36 @@ class SQLiteDialect(default.DefaultDialect):
    supports_cast = True
    supports_default_values = True

    construct_arguments = [
        (sa_schema.Table, {
            "autoincrement": False
        })
    ]

    _broken_fk_pragma_quotes = False

    def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
        default.DefaultDialect.__init__(self, **kwargs)
        self.isolation_level = isolation_level

        # this flag used by pysqlite dialect, and perhaps others in the future,
        # to indicate the driver is handling date/timestamp conversions (and
        # perhaps datetime/time as well on some hypothetical driver ?)
        # this flag used by pysqlite dialect, and perhaps others in the
        # future, to indicate the driver is handling date/timestamp
        # conversions (and perhaps datetime/time as well on some
        # hypothetical driver ?)
        self.native_datetime = native_datetime

        if self.dbapi is not None:
            self.supports_default_values = (
                self.dbapi.sqlite_version_info >= (3, 3, 8))
            self.supports_cast = (
                self.dbapi.sqlite_version_info >= (3, 2, 3))
            self.supports_multivalues_insert = (
                # http://www.sqlite.org/releaselog/3_7_11.html
                self.dbapi.sqlite_version_info >= (3, 7, 11))
            self.supports_default_values = \
                self.dbapi.sqlite_version_info >= (3, 3, 8)
            self.supports_cast = \
                self.dbapi.sqlite_version_info >= (3, 2, 3)
            self.supports_multivalues_insert = \
                self.dbapi.sqlite_version_info >= (3, 7, 11)
            # http://www.sqlite.org/releaselog/3_7_11.html

            # see http://www.sqlalchemy.org/trac/ticket/2568
            # as well as http://www.sqlite.org/src/info/600482d161
            self._broken_fk_pragma_quotes = (
                self.dbapi.sqlite_version_info < (3, 6, 14))
            self._broken_fk_pragma_quotes = \
                self.dbapi.sqlite_version_info < (3, 6, 14)


    _isolation_lookup = {
        'READ UNCOMMITTED': 1,
        'SERIALIZABLE': 0,
        'SERIALIZABLE': 0
    }

    def set_isolation_level(self, connection, level):
@ -786,7 +727,7 @@ class SQLiteDialect(default.DefaultDialect):
            while not cursor.closed and cursor.fetchone() is not None:
                pass

        return row is not None
        return (row is not None)

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
@ -812,6 +753,7 @@ class SQLiteDialect(default.DefaultDialect):

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        quote = self.identifier_preparer.quote_identifier
        if schema is not None:
            qschema = self.identifier_preparer.quote_identifier(schema)
            master = '%s.sqlite_master' % qschema
@ -849,18 +791,36 @@ class SQLiteDialect(default.DefaultDialect):
        rows = c.fetchall()
        columns = []
        for row in rows:
            (name, type_, nullable, default, primary_key) = (
                row[1], row[2].upper(), not row[3], row[4], row[5])
            (name, type_, nullable, default, primary_key) = \
                (row[1], row[2].upper(), not row[3],
                 row[4], row[5])

            columns.append(self._get_column_info(name, type_, nullable,
                                                 default, primary_key))
                            default, primary_key))
        return columns

    def _get_column_info(self, name, type_, nullable, default, primary_key):
        coltype = self._resolve_type_affinity(type_)
    def _get_column_info(self, name, type_, nullable,
                         default, primary_key):

        match = re.match(r'(\w+)(\(.*?\))?', type_)
        if match:
            coltype = match.group(1)
            args = match.group(2)
        else:
            coltype = "VARCHAR"
            args = ''
        try:
            coltype = self.ischema_names[coltype]
            if args is not None:
                args = re.findall(r'(\d+)', args)
                coltype = coltype(*[int(a) for a in args])
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (coltype, name))
            coltype = sqltypes.NullType()

        if default is not None:
            default = util.text_type(default)
            default = unicode(default)

        return {
            'name': name,
@ -868,62 +828,9 @@ class SQLiteDialect(default.DefaultDialect):
|
||||
'nullable': nullable,
|
||||
'default': default,
|
||||
'autoincrement': default is None,
|
||||
'primary_key': primary_key,
|
||||
'primary_key': primary_key
|
||||
}
|
||||
|
||||
def _resolve_type_affinity(self, type_):
|
||||
"""Return a data type from a reflected column, using affinity tules.
|
||||
|
||||
SQLite's goal for universal compatibility introduces some complexity
|
||||
during reflection, as a column's defined type might not actually be a
|
||||
type that SQLite understands - or indeed, my not be defined *at all*.
|
||||
Internally, SQLite handles this with a 'data type affinity' for each
|
||||
column definition, mapping to one of 'TEXT', 'NUMERIC', 'INTEGER',
|
||||
'REAL', or 'NONE' (raw bits). The algorithm that determines this is
|
||||
listed in http://www.sqlite.org/datatype3.html section 2.1.
|
||||
|
||||
This method allows SQLAlchemy to support that algorithm, while still
|
||||
providing access to smarter reflection utilities by regcognizing
|
||||
column definitions that SQLite only supports through affinity (like
|
||||
DATE and DOUBLE).
|
||||
|
||||
"""
|
||||
match = re.match(r'([\w ]+)(\(.*?\))?', type_)
|
||||
if match:
|
||||
coltype = match.group(1)
|
||||
args = match.group(2)
|
||||
else:
|
||||
coltype = ''
|
||||
args = ''
|
||||
|
||||
if coltype in self.ischema_names:
|
||||
coltype = self.ischema_names[coltype]
|
||||
elif 'INT' in coltype:
|
||||
coltype = sqltypes.INTEGER
|
||||
elif 'CHAR' in coltype or 'CLOB' in coltype or 'TEXT' in coltype:
|
||||
coltype = sqltypes.TEXT
|
||||
elif 'BLOB' in coltype or not coltype:
|
||||
coltype = sqltypes.NullType
|
||||
elif 'REAL' in coltype or 'FLOA' in coltype or 'DOUB' in coltype:
|
||||
coltype = sqltypes.REAL
|
||||
else:
|
||||
coltype = sqltypes.NUMERIC
|
||||
|
||||
if args is not None:
|
||||
args = re.findall(r'(\d+)', args)
|
||||
try:
|
||||
coltype = coltype(*[int(a) for a in args])
|
||||
except TypeError:
|
||||
util.warn(
|
||||
"Could not instantiate type %s with "
|
||||
"reflected arguments %s; using no arguments." %
|
||||
(coltype, args))
|
||||
coltype = coltype()
|
||||
else:
|
||||
coltype = coltype()
|
||||
|
||||
return coltype
|
||||
|
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
cols = self.get_columns(connection, table_name, schema, **kw)
@ -955,8 +862,8 @@ class SQLiteDialect(default.DefaultDialect):
return fkeys

def _parse_fk(self, fks, fkeys, numerical_id, rtbl, lcol, rcol):
# sqlite won't return rcol if the table was created with REFERENCES
# <tablename>, no col
# sqlite won't return rcol if the table
# was created with REFERENCES <tablename>, no col
if rcol is None:
rcol = lcol

@ -971,7 +878,7 @@ class SQLiteDialect(default.DefaultDialect):
'constrained_columns': [],
'referred_schema': None,
'referred_table': rtbl,
'referred_columns': [],
'referred_columns': []
}
fkeys.append(fk)
fks[numerical_id] = fk
@ -1033,8 +940,7 @@ class SQLiteDialect(default.DefaultDialect):

UNIQUE_PATTERN = 'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)'
return [
{'name': name,
'column_names': [col.strip(' "') for col in cols.split(',')]}
{'name': name, 'column_names': [c.strip(' "') for c in cols.split(',')]}
for name, cols in re.findall(UNIQUE_PATTERN, table_data)
]
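As a rough illustration of what that regex extracts (a sketch; the real method feeds it the stored CREATE TABLE text from sqlite_master)::

    import re

    UNIQUE_PATTERN = r'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)'
    table_data = ('CREATE TABLE t (a INTEGER, b TEXT, '
                  'CONSTRAINT uq_ab UNIQUE ("a", "b"))')

    constraints = [
        {'name': name, 'column_names': [c.strip(' "') for c in cols.split(',')]}
        for name, cols in re.findall(UNIQUE_PATTERN, table_data)
    ]
    # -> [{'name': 'uq_ab', 'column_names': ['a', 'b']}]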
@ -162,8 +162,8 @@ Using Temporary Tables with SQLite
Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
the temporary table should continue to remain after :meth:`.Session.commit` or
:meth:`.Session.rollback` is called, a pool which maintains a single connection must
the temporary table should continue to remain after :meth:`.commit` or
:meth:`.rollback` is called, a pool which maintains a single connection must
be used. Use :class:`.SingletonThreadPool` if the scope is only needed
within the current thread, or :class:`.StaticPool` if scope is needed within
multiple threads for this case::
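The code sample that follows that sentence in the docs was cut off in this view; a plausible version, using the standard create_engine/StaticPool API::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import StaticPool

    # a single connection shared across all checkouts, so the
    # temporary table survives commit()/rollback()
    engine = create_engine('sqlite:///myfile.db',
                           poolclass=StaticPool)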
@ -269,8 +269,8 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
}
)

if not util.py2k:
description_encoding = None
# Py3K
#description_encoding = None

driver = 'pysqlite'

@ -290,7 +290,7 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
def dbapi(cls):
try:
from pysqlite2 import dbapi2 as sqlite
except ImportError as e:
except ImportError, e:
try:
from sqlite3 import dbapi2 as sqlite  # try 2.5+ stdlib name.
except ImportError:

@ -9,7 +9,7 @@ from sqlalchemy.dialects.sybase import base, pysybase, pyodbc
# default dialect
base.dialect = pyodbc.dialect

from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
from base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
BIGINT, INT, INTEGER, SMALLINT, BINARY,\
VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\

@ -440,8 +440,6 @@ class SybaseDialect(default.DefaultDialect):
preparer = SybaseIdentifierPreparer
inspector = SybaseInspector

construct_arguments = []

def _get_default_schema_name(self, connection):
return connection.scalar(
text("SELECT user_name() as user_name",
@ -477,11 +475,12 @@ class SybaseDialect(default.DefaultDialect):
AND o.type in ('U', 'V')
""")

if util.py2k:
if isinstance(schema, unicode):
schema = schema.encode("ascii")
if isinstance(table_name, unicode):
table_name = table_name.encode("ascii")
# Py2K
if isinstance(schema, unicode):
schema = schema.encode("ascii")
if isinstance(table_name, unicode):
table_name = table_name.encode("ascii")
# end Py2K
result = connection.execute(TABLEID_SQL,
schema_name=schema,
table_name=table_name)
@ -760,10 +759,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'U'
""")

if util.py2k:
if isinstance(schema, unicode):
schema = schema.encode("ascii")

# Py2K
if isinstance(schema, unicode):
schema = schema.encode("ascii")
# end Py2K
tables = connection.execute(TABLE_SQL, schema_name=schema)

return [t["name"] for t in tables]
@ -780,10 +779,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'V'
""")

if util.py2k:
if isinstance(view_name, unicode):
view_name = view_name.encode("ascii")

# Py2K
if isinstance(view_name, unicode):
view_name = view_name.encode("ascii")
# end Py2K
view = connection.execute(VIEW_DEF_SQL, view_name=view_name)

return view.scalar()
@ -800,9 +799,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'V'
""")

if util.py2k:
if isinstance(schema, unicode):
schema = schema.encode("ascii")
# Py2K
if isinstance(schema, unicode):
schema = schema.encode("ascii")
# end Py2K
views = connection.execute(VIEW_SQL, schema_name=schema)

return [v["name"] for v in views]

@ -50,13 +50,14 @@ url.py
within a URL.
"""

# not sure what this was used for
#import sqlalchemy.databases

from .interfaces import (
Compiled,
Connectable,
Dialect,
ExecutionContext,

# backwards compat
Compiled,
TypeCompiler
)

@ -82,12 +83,8 @@ from .util import (
connection_memoize
)

from . import util, strategies

# backwards compat
from ..sql import ddl

default_strategy = 'plain'

@ -357,13 +354,10 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
arguments.
"""

options = dict((key[len(prefix):], configuration[key])
for key in configuration
if key.startswith(prefix))
options['_coerce_config'] = True
options.update(kwargs)
url = options.pop('url')
return create_engine(url, **options)
opts = util._coerce_config(configuration, prefix)
opts.update(kwargs)
url = opts.pop('url')
return create_engine(url, **opts)
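Both versions strip the prefix from a flat config dict and hand the rest to create_engine(); typical usage (a sketch; string values such as 'true' are coerced to typed values by the version in use)::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite:///:memory:',
        'sqlalchemy.echo': 'true',
        'sqlalchemy.pool_recycle': '3600',
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')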
__all__ = (

@ -3,16 +3,16 @@
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import with_statement


"""Defines :class:`.Connection` and :class:`.Engine`.

"""


from __future__ import with_statement
import sys
from .. import exc, util, log, interfaces
from ..sql import expression, util as sql_util, schema, ddl
from .. import exc, schema, util, log, interfaces
from ..sql import expression, util as sql_util
from .interfaces import Connectable, Compiled
from .util import _distill_params
import contextlib
@ -46,7 +46,7 @@ class Connection(Connectable):
def __init__(self, engine, connection=None, close_with_result=False,
_branch=False, _execution_options=None,
_dispatch=None,
_has_events=None):
_has_events=False):
"""Construct a new Connection.

The constructor here is not public and is only called by an
@ -65,13 +65,9 @@ class Connection(Connectable):
self.__can_reconnect = True
if _dispatch:
self.dispatch = _dispatch
elif _has_events is None:
# if _has_events is sent explicitly as False,
# then don't join the dispatch of the engine; we don't
# want to handle any of the engine's events in that case.
elif engine._has_events:
self.dispatch = self.dispatch._join(engine.dispatch)
self._has_events = _has_events or (
_has_events is None and engine._has_events)
self._has_events = _has_events or engine._has_events

self._echo = self.engine._should_log_info()
if _execution_options:
@ -80,9 +76,6 @@ class Connection(Connectable):
else:
self._execution_options = engine._execution_options

if self._has_events or self.engine._has_events:
self.dispatch.engine_connect(self, _branch)

def _branch(self):
"""Return a new Connection which references this Connection's
engine and connection; but does not have close_with_result enabled,
@ -207,11 +200,16 @@ class Connection(Connectable):
"""
c = self._clone()
c._execution_options = c._execution_options.union(opt)
if self._has_events or self.engine._has_events:
self.dispatch.set_connection_execution_options(c, opt)
self.dialect.set_connection_execution_options(c, opt)
if 'isolation_level' in opt:
c._set_isolation_level()
return c

def _set_isolation_level(self):
self.dialect.set_isolation_level(self.connection,
self._execution_options['isolation_level'])
self.connection._connection_record.finalize_callback = \
self.dialect.reset_isolation_level

@property
def closed(self):
"""Return True if this connection is closed."""
@ -306,40 +304,20 @@ class Connection(Connectable):

def invalidate(self, exception=None):
"""Invalidate the underlying DBAPI connection associated with
this :class:`.Connection`.
this Connection.

The underlying DBAPI connection is literally closed (if
The underlying DB-API connection is literally closed (if
possible), and is discarded. Its source connection pool will
typically lazily create a new connection to replace it.

Upon the next use (where "use" typically means using the
:meth:`.Connection.execute` method or similar),
this :class:`.Connection` will attempt to
procure a new DBAPI connection using the services of the
:class:`.Pool` as a source of connectivity (e.g. a "reconnection").
Upon the next usage, this Connection will attempt to reconnect
to the pool with a new connection.

If a transaction was in progress (e.g. the
:meth:`.Connection.begin` method has been called) when the
:meth:`.Connection.invalidate` method is called, at the DBAPI
level all state associated with this transaction is lost, as
the DBAPI connection is closed. The :class:`.Connection`
will not allow a reconnection to proceed until the :class:`.Transaction`
object is ended, by calling the :meth:`.Transaction.rollback`
method; until that point, any attempt at continuing to use the
:class:`.Connection` will raise an
:class:`~sqlalchemy.exc.InvalidRequestError`.
This is to prevent applications from accidentally
continuing ongoing transactional operations despite the
fact that the transaction has been lost due to an
invalidation.

The :meth:`.Connection.invalidate` method, just like auto-invalidation,
will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
event.

.. seealso::

:ref:`pool_connection_invalidation`
Transactions in progress remain in an "opened" state (even though the
actual transaction is gone); these must be explicitly rolled back
before a reconnect on this Connection can proceed. This is to prevent
applications from accidentally continuing their transactional
operations in a non-transactional state.

"""
if self.invalidated:
@ -356,26 +334,17 @@ class Connection(Connectable):
def detach(self):
"""Detach the underlying DB-API connection from its connection pool.

E.g.::

with engine.connect() as conn:
conn.detach()
conn.execute("SET search_path TO schema1, schema2")

# work with connection

# connection is fully closed (since we used "with:", can
# also call .close())

This :class:`.Connection` instance will remain usable. When closed
(or exited from a context manager context as above),
This Connection instance will remain usable. When closed,
the DB-API connection will be literally closed and not
returned to its originating pool.
returned to its pool. The pool will typically lazily create a
new connection to replace the detached connection.

This method can be used to insulate the rest of an application
from a modified state on a connection (such as a transaction
isolation level or similar).

isolation level or similar). Also see
:class:`~sqlalchemy.interfaces.PoolListener` for a mechanism to modify
connection state when connections leave and return to their
connection pool.
"""

self.__connection.detach()
@ -435,6 +404,7 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""

if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@ -481,22 +451,20 @@ class Connection(Connectable):

return self.__transaction is not None

def _begin_impl(self, transaction):
def _begin_impl(self):
if self._echo:
self.engine.logger.info("BEGIN (implicit)")

if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.begin(self)

try:
self.engine.dialect.do_begin(self.connection)
if self.connection._reset_agent is None:
self.connection._reset_agent = transaction
except Exception as e:
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)

def _rollback_impl(self):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.rollback(self)

if self._still_open_and_connection_is_valid:
@ -504,32 +472,26 @@ class Connection(Connectable):
self.engine.logger.info("ROLLBACK")
try:
self.engine.dialect.do_rollback(self.connection)
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
self.__transaction = None
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
else:
self.__transaction = None

def _commit_impl(self, autocommit=False):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.commit(self)

if self._echo:
self.engine.logger.info("COMMIT")
try:
self.engine.dialect.do_commit(self.connection)
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
self.__transaction = None
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)

def _savepoint_impl(self, name=None):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.savepoint(self, name)

if name is None:
@ -540,7 +502,7 @@ class Connection(Connectable):
return name

def _rollback_to_savepoint_impl(self, name, context):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.rollback_savepoint(self, name, context)

if self._still_open_and_connection_is_valid:
@ -548,27 +510,24 @@ class Connection(Connectable):
self.__transaction = context

def _release_savepoint_impl(self, name, context):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.release_savepoint(self, name, context)

if self._still_open_and_connection_is_valid:
self.engine.dialect.do_release_savepoint(self, name)
self.__transaction = context

def _begin_twophase_impl(self, transaction):
def _begin_twophase_impl(self, xid):
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events or self.engine._has_events:
self.dispatch.begin_twophase(self, transaction.xid)
if self._has_events:
self.dispatch.begin_twophase(self, xid)

if self._still_open_and_connection_is_valid:
self.engine.dialect.do_begin_twophase(self, transaction.xid)

if self.connection._reset_agent is None:
self.connection._reset_agent = transaction
self.engine.dialect.do_begin_twophase(self, xid)

def _prepare_twophase_impl(self, xid):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.prepare_twophase(self, xid)

if self._still_open_and_connection_is_valid:
@ -576,34 +535,22 @@ class Connection(Connectable):
self.engine.dialect.do_prepare_twophase(self, xid)

def _rollback_twophase_impl(self, xid, is_prepared):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.rollback_twophase(self, xid, is_prepared)

if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
try:
self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
self.__transaction = None
else:
self.__transaction = None
self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
self.__transaction = None

def _commit_twophase_impl(self, xid, is_prepared):
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.commit_twophase(self, xid, is_prepared)

if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
try:
self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
self.__transaction = None
else:
self.__transaction = None
self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
self.__transaction = None
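The two-phase hooks above are driven from TwoPhaseTransaction; typical driver-side usage looks roughly like this (a sketch, assuming an existing engine, a some_table Table object, and a backend with 2PC support such as PostgreSQL)::

    conn = engine.connect()
    tx = conn.begin_twophase()        # runs _begin_twophase_impl() with a new xid
    try:
        conn.execute(some_table.insert(), {'data': 'value'})
        tx.prepare()                  # PREPARE TRANSACTION phase
        tx.commit()                   # COMMIT PREPARED
    except Exception:
        tx.rollback()
        raise
    finally:
        conn.close()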
def _autorollback(self):
if not self.in_transaction():
@ -635,8 +582,6 @@ class Connection(Connectable):
else:
if not self.__branch:
conn.close()
if conn._reset_agent is self.__transaction:
conn._reset_agent = None
del self.__connection
self.__can_reconnect = False
self.__transaction = None
@ -708,16 +653,17 @@ class Connection(Connectable):
DBAPI-agnostic way, use the :func:`~.expression.text` construct.

"""
if isinstance(object, util.string_types[0]):
return self._execute_text(object, multiparams, params)
try:
meth = object._execute_on_connection
except AttributeError:
for c in type(object).__mro__:
if c in Connection.executors:
return Connection.executors[c](
self,
object,
multiparams,
params)
else:
raise exc.InvalidRequestError(
"Unexecutable object type: %s" %
type(object))
else:
return meth(self, multiparams, params)

def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
@ -728,7 +674,7 @@ class Connection(Connectable):
def _execute_default(self, default, multiparams, params):
"""Execute a schema.ColumnDefault object."""

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_execute:
default, multiparams, params = \
fn(self, default, multiparams, params)
@ -742,14 +688,14 @@ class Connection(Connectable):
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
dialect, self, conn)
except Exception as e:
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)

ret = ctx._exec_default(default, None)
if self.should_close_with_result:
self.close()

if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_execute(self,
default, multiparams, params, ret)

@ -758,7 +704,7 @@ class Connection(Connectable):
def _execute_ddl(self, ddl, multiparams, params):
"""Execute a schema.DDL object."""

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_execute:
ddl, multiparams, params = \
fn(self, ddl, multiparams, params)
@ -773,7 +719,7 @@ class Connection(Connectable):
None,
compiled
)
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_execute(self,
ddl, multiparams, params, ret)
return ret
@ -781,15 +727,13 @@ class Connection(Connectable):
def _execute_clauseelement(self, elem, multiparams, params):
"""Execute a sql.ClauseElement object."""

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_execute:
elem, multiparams, params = \
fn(self, elem, multiparams, params)

distilled_params = _distill_params(multiparams, params)
if distilled_params:
# note this is usually dict but we support RowProxy
# as well; but dict.keys() as an iterator is OK
keys = distilled_params[0].keys()
else:
keys = []
@ -816,7 +760,7 @@ class Connection(Connectable):
distilled_params,
compiled_sql, distilled_params
)
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_execute(self,
elem, multiparams, params, ret)
return ret
@ -824,7 +768,7 @@ class Connection(Connectable):
def _execute_compiled(self, compiled, multiparams, params):
"""Execute a sql.Compiled object."""

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_execute:
compiled, multiparams, params = \
fn(self, compiled, multiparams, params)
@ -838,7 +782,7 @@ class Connection(Connectable):
parameters,
compiled, parameters
)
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_execute(self,
compiled, multiparams, params, ret)
return ret
@ -846,7 +790,7 @@ class Connection(Connectable):
def _execute_text(self, statement, multiparams, params):
"""Execute a string SQL statement."""

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_execute:
statement, multiparams, params = \
fn(self, statement, multiparams, params)
@ -860,7 +804,7 @@ class Connection(Connectable):
parameters,
statement, parameters
)
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_execute(self,
statement, multiparams, params, ret)
return ret
@ -878,7 +822,7 @@ class Connection(Connectable):
conn = self._revalidate_connection()

context = constructor(dialect, self, conn, *args)
except Exception as e:
except Exception, e:
self._handle_dbapi_exception(e,
util.text_type(statement), parameters,
None, None)
@ -893,7 +837,7 @@ class Connection(Connectable):
if not context.executemany:
parameters = parameters[0]

if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
fn(self, cursor, statement, parameters,
@ -905,40 +849,23 @@ class Connection(Connectable):
sql_util._repr_params(parameters, batches=10))
try:
if context.executemany:
for fn in () if not self.dialect._has_events \
else self.dialect.dispatch.do_executemany:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_executemany(
cursor,
statement,
parameters,
context)

self.dialect.do_executemany(
cursor,
statement,
parameters,
context)
elif not parameters and context.no_parameters:
for fn in () if not self.dialect._has_events \
else self.dialect.dispatch.do_execute_no_params:
if fn(cursor, statement, context):
break
else:
self.dialect.do_execute_no_params(
cursor,
statement,
context)

self.dialect.do_execute_no_params(
cursor,
statement,
context)
else:
for fn in () if not self.dialect._has_events \
else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
cursor,
statement,
parameters,
context)
except Exception as e:
self.dialect.do_execute(
cursor,
statement,
parameters,
context)
except Exception, e:
self._handle_dbapi_exception(
e,
statement,
@ -946,7 +873,7 @@ class Connection(Connectable):
cursor,
context)

if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.after_cursor_execute(self, cursor,
statement,
parameters,
@ -970,11 +897,6 @@ class Connection(Connectable):
elif not context._is_explicit_returning:
result.close(_autoclose_connection=False)
result._metadata = None
elif context.isupdate and context._is_implicit_returning:
context._fetch_implicit_update_returning(result)
result.close(_autoclose_connection=False)
result._metadata = None

elif result._metadata is None:
# no results, get rowcount
# (which requires open cursor on some drivers
@ -1001,41 +923,29 @@ class Connection(Connectable):
terminates at _execute_context().

"""
if self._has_events or self.engine._has_events:
if self._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
fn(self, cursor, statement, parameters,
context,
False)
context.executemany
if context is not None else False)

if self._echo:
self.engine.logger.info(statement)
self.engine.logger.info("%r", parameters)
try:
for fn in () if not self.dialect._has_events \
else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
cursor,
statement,
parameters,
context)
except Exception as e:
self.dialect.do_execute(
cursor,
statement,
parameters)
except Exception, e:
self._handle_dbapi_exception(
e,
statement,
parameters,
cursor,
context)

if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(self, cursor,
statement,
parameters,
context,
False)
None)

def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
@ -1044,11 +954,17 @@ class Connection(Connectable):
"""
try:
cursor.close()
except (SystemExit, KeyboardInterrupt):
raise
except Exception:
self.connection._logger.error(
"Error closing cursor", exc_info=True)
except Exception, e:
try:
ex_text = str(e)
except TypeError:
ex_text = repr(e)
if not self.closed:
self.connection._logger.warn(
"Error closing cursor: %s", ex_text)

if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise

_reentrant_error = False
_is_disconnect = False
@ -1078,12 +994,12 @@ class Connection(Connectable):
self._reentrant_error = True
try:
# non-DBAPI error - if we already got a context,
# or there's no string statement, don't wrap it
# or theres no string statement, don't wrap it
should_wrap = isinstance(e, self.dialect.dbapi.Error) or \
(statement is not None and context is None)

if should_wrap and context:
if self._has_events or self.engine._has_events:
if self._has_events:
self.dispatch.dbapi_error(self,
cursor,
statement,
@ -1115,11 +1031,23 @@ class Connection(Connectable):
if self._is_disconnect:
del self._is_disconnect
dbapi_conn_wrapper = self.connection
self.engine.pool._invalidate(dbapi_conn_wrapper, e)
self.invalidate(e)
if not hasattr(dbapi_conn_wrapper, '_pool') or \
dbapi_conn_wrapper._pool is self.engine.pool:
self.engine.dispose()
if self.should_close_with_result:
self.close()

# poor man's multimethod/generic function thingy
executors = {
expression.FunctionElement: _execute_function,
expression.ClauseElement: _execute_clauseelement,
Compiled: _execute_compiled,
schema.SchemaItem: _execute_default,
schema.DDLElement: _execute_ddl,
basestring: _execute_text
}
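The executors table is consulted by walking the MRO of the executed object, so registering an executor for a base class covers all of its subclasses. A condensed restatement of the fallback path in Connection.execute() above (a sketch using the module's own Connection and exc names)::

    def dispatch_execute(conn, obj, multiparams, params):
        # Walk the type's MRO and call the first executor registered
        # for the object's class or any base class.
        for cls in type(obj).__mro__:
            if cls in Connection.executors:
                return Connection.executors[cls](conn, obj, multiparams, params)
        raise exc.InvalidRequestError(
            "Unexecutable object type: %s" % type(obj))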
def default_schema_name(self):
|
||||
return self.engine.dialect.get_default_schema_name(self)
|
||||
|
||||
@ -1287,7 +1215,7 @@ class Transaction(object):
|
||||
class RootTransaction(Transaction):
|
||||
def __init__(self, connection):
|
||||
super(RootTransaction, self).__init__(connection, None)
|
||||
self.connection._begin_impl(self)
|
||||
self.connection._begin_impl()
|
||||
|
||||
def _do_rollback(self):
|
||||
if self.is_active:
|
||||
@ -1336,7 +1264,7 @@ class TwoPhaseTransaction(Transaction):
|
||||
super(TwoPhaseTransaction, self).__init__(connection, None)
|
||||
self._is_prepared = False
|
||||
self.xid = xid
|
||||
self.connection._begin_twophase_impl(self)
|
||||
self.connection._begin_twophase_impl(self.xid)
|
||||
|
||||
def prepare(self):
|
||||
"""Prepare this :class:`.TwoPhaseTransaction`.
|
||||
@ -1412,10 +1340,15 @@ class Engine(Connectable, log.Identified):
|
||||
:meth:`.Engine.execution_options`
|
||||
|
||||
"""
|
||||
if 'isolation_level' in opt:
|
||||
raise exc.ArgumentError(
|
||||
"'isolation_level' execution option may "
|
||||
"only be specified on Connection.execution_options(). "
|
||||
"To set engine-wide isolation level, "
|
||||
"use the isolation_level argument to create_engine()."
|
||||
)
|
||||
self._execution_options = \
|
||||
self._execution_options.union(opt)
|
||||
self.dispatch.set_engine_execution_options(self, opt)
|
||||
self.dialect.set_engine_execution_options(self, opt)
|
||||
|
||||
def execution_options(self, **opt):
|
||||
"""Return a new :class:`.Engine` that will provide
|
||||
@ -1526,8 +1459,7 @@ class Engine(Connectable, log.Identified):
|
||||
the engine are not affected.
|
||||
|
||||
"""
|
||||
self.pool.dispose()
|
||||
self.pool = self.pool.recreate()
|
||||
self.pool = self.pool._replace()
|
||||
|
||||
def _execute_default(self, default):
|
||||
with self.contextual_connect() as conn:
|
||||
@ -1745,17 +1677,6 @@ class Engine(Connectable, log.Identified):
|
||||
return self.dialect.get_table_names(conn, schema)
|
||||
|
||||
def has_table(self, table_name, schema=None):
|
||||
"""Return True if the given backend has a table of the given name.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`metadata_reflection_inspector` - detailed schema inspection using
|
||||
the :class:`.Inspector` interface.
|
||||
|
||||
:class:`.quoted_name` - used to pass quoting information along
|
||||
with a schema identifier.
|
||||
|
||||
"""
|
||||
return self.run_callable(self.dialect.has_table, table_name, schema)
|
||||
|
||||
def raw_connection(self):
|
||||
|
193
lib/sqlalchemy/engine/ddl.py
Normal file
193
lib/sqlalchemy/engine/ddl.py
Normal file
@ -0,0 +1,193 @@
# engine/ddl.py
# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle CREATE/DROP workflow."""

from .. import schema
from ..sql import util as sql_util

class DDLBase(schema.SchemaVisitor):
def __init__(self, connection):
self.connection = connection

class SchemaGenerator(DDLBase):

def __init__(self, dialect, connection, checkfirst=False,
tables=None, **kwargs):
super(SchemaGenerator, self).__init__(connection, **kwargs)
self.checkfirst = checkfirst
self.tables = tables
self.preparer = dialect.identifier_preparer
self.dialect = dialect
self.memo = {}

def _can_create_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or \
not self.dialect.has_table(self.connection,
table.name, schema=table.schema)

def _can_create_sequence(self, sequence):
return self.dialect.supports_sequences and \
(
(not self.dialect.sequences_optional or
not sequence.optional) and
(
not self.checkfirst or
not self.dialect.has_sequence(
self.connection,
sequence.name,
schema=sequence.schema)
)
)

def visit_metadata(self, metadata):
if self.tables is not None:
tables = self.tables
else:
tables = metadata.tables.values()
collection = [t for t in sql_util.sort_tables(tables)
if self._can_create_table(t)]
seq_coll = [s for s in metadata._sequences.values()
if s.column is None and self._can_create_sequence(s)]

metadata.dispatch.before_create(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)

for seq in seq_coll:
self.traverse_single(seq, create_ok=True)

for table in collection:
self.traverse_single(table, create_ok=True)

metadata.dispatch.after_create(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)

def visit_table(self, table, create_ok=False):
if not create_ok and not self._can_create_table(table):
return

table.dispatch.before_create(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)

for column in table.columns:
if column.default is not None:
self.traverse_single(column.default)

self.connection.execute(schema.CreateTable(table))

if hasattr(table, 'indexes'):
for index in table.indexes:
self.traverse_single(index)

table.dispatch.after_create(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)

def visit_sequence(self, sequence, create_ok=False):
if not create_ok and not self._can_create_sequence(sequence):
return
self.connection.execute(schema.CreateSequence(sequence))

def visit_index(self, index):
self.connection.execute(schema.CreateIndex(index))

class SchemaDropper(DDLBase):

def __init__(self, dialect, connection, checkfirst=False,
tables=None, **kwargs):
super(SchemaDropper, self).__init__(connection, **kwargs)
self.checkfirst = checkfirst
self.tables = tables
self.preparer = dialect.identifier_preparer
self.dialect = dialect
self.memo = {}

def visit_metadata(self, metadata):
if self.tables is not None:
tables = self.tables
else:
tables = metadata.tables.values()

collection = [
t
for t in reversed(sql_util.sort_tables(tables))
if self._can_drop_table(t)
]

seq_coll = [
s
for s in metadata._sequences.values()
if s.column is None and self._can_drop_sequence(s)
]

metadata.dispatch.before_drop(
metadata, self.connection, tables=collection,
checkfirst=self.checkfirst, _ddl_runner=self)

for table in collection:
self.traverse_single(table, drop_ok=True)

for seq in seq_coll:
self.traverse_single(seq, drop_ok=True)

metadata.dispatch.after_drop(
metadata, self.connection, tables=collection,
checkfirst=self.checkfirst, _ddl_runner=self)

def _can_drop_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or self.dialect.has_table(self.connection,
table.name, schema=table.schema)

def _can_drop_sequence(self, sequence):
return self.dialect.supports_sequences and \
((not self.dialect.sequences_optional or
not sequence.optional) and
(not self.checkfirst or
self.dialect.has_sequence(
self.connection,
sequence.name,
schema=sequence.schema))
)

def visit_index(self, index):
self.connection.execute(schema.DropIndex(index))

def visit_table(self, table, drop_ok=False):
if not drop_ok and not self._can_drop_table(table):
return

table.dispatch.before_drop(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)

for column in table.columns:
if column.default is not None:
self.traverse_single(column.default)

self.connection.execute(schema.DropTable(table))

table.dispatch.after_drop(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)

def visit_sequence(self, sequence, drop_ok=False):
if not drop_ok and not self._can_drop_sequence(sequence):
return
self.connection.execute(schema.DropSequence(sequence))
@ -16,18 +16,15 @@ import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression
from .. import types as sqltypes
from .. import exc, util, pool, processors
from .. import exc, types as sqltypes, util, pool, processors
import codecs
import weakref
from .. import event

AUTOCOMMIT_REGEXP = re.compile(
r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
re.I | re.UNICODE)

class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""

@ -52,40 +49,25 @@ class DefaultDialect(interfaces.Dialect):
postfetch_lastrowid = True
implicit_returning = False

supports_right_nested_joins = True

supports_native_enum = False
supports_native_boolean = False

supports_simple_order_by_label = True

engine_config_types = util.immutabledict([
('convert_unicode', util.bool_or_str('force')),
('pool_timeout', int),
('echo', util.bool_or_str('debug')),
('echo_pool', util.bool_or_str('debug')),
('pool_recycle', int),
('pool_size', int),
('max_overflow', int),
('pool_threadlocal', bool),
('use_native_unicode', bool),
])
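The engine_config_types map pairs each option with a coercion callable, so string values from .ini-style config become typed values. An approximation of what util.asbool / util.bool_or_str do (names mirror the real helpers, but this is a sketch, not the library source)::

    def asbool(value):
        # Accept typical string spellings of booleans from config files
        if isinstance(value, str):
            return value.strip().lower() in ('true', 'yes', 'on', '1')
        return bool(value)

    def bool_or_str(*text):
        # Coerce to bool unless the value is one of the allowed strings,
        # e.g. echo='debug' passes through unchanged
        def coerce(value):
            return value if value in text else asbool(value)
        return coerce

    assert bool_or_str('debug')('true') is True
    assert bool_or_str('debug')('debug') == 'debug'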
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
supports_native_decimal = False

if util.py3k:
supports_unicode_statements = True
supports_unicode_binds = True
returns_unicode_strings = True
description_encoding = None
else:
supports_unicode_statements = False
supports_unicode_binds = False
returns_unicode_strings = False
description_encoding = 'use_encoding'
# Py3K
#supports_unicode_statements = True
#supports_unicode_binds = True
#returns_unicode_strings = True
#description_encoding = None
# Py2K
supports_unicode_statements = False
supports_unicode_binds = False
returns_unicode_strings = False
description_encoding = 'use_encoding'
# end Py2K

name = 'default'

@ -111,43 +93,6 @@ class DefaultDialect(interfaces.Dialect):

server_version_info = None

construct_arguments = None
"""Optional set of argument specifiers for various SQLAlchemy
constructs, typically schema items.

To implement, establish as a series of tuples, as in::

construct_arguments = [
(schema.Index, {
"using": False,
"where": None,
"ops": None
})
]

If the above construct is established on the Postgresql dialect,
the :class:`.Index` construct will now accept the keyword arguments
``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
Any other argument specified to the constructor of :class:`.Index`
which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.

A dialect which does not include a ``construct_arguments`` member will
not participate in the argument validation system. For such a dialect,
any argument name is accepted by all participating constructs, within
the namespace of arguments prefixed with that dialect name. The rationale
here is so that third-party dialects that haven't yet implemented this
feature continue to function in the old way.

.. versionadded:: 0.9.2

.. seealso::

:class:`.DialectKWArgs` - implementing base class which consumes
:attr:`.DefaultDialect.construct_arguments`

"""
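On 0.9, that validation is visible at construction time; a brief sketch::

    from sqlalchemy import Column, Index, Integer, MetaData, Table

    metadata = MetaData()
    t = Table('t', metadata, Column('data', Integer))

    # accepted because the Postgresql dialect declares "using" for Index
    ix = Index('ix_data', t.c.data, postgresql_using='gin')

    # a name outside the declared set, e.g. postgresql_foobar=...,
    # would raise ArgumentError on a dialect declaring construct_arguments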
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
@ -160,9 +105,7 @@ class DefaultDialect(interfaces.Dialect):
def __init__(self, convert_unicode=False,
encoding='utf-8', paramstyle=None, dbapi=None,
implicit_returning=None,
supports_right_nested_joins=None,
case_sensitive=True,
supports_native_boolean=None,
label_length=None, **kwargs):

if not getattr(self, 'ported_sqla_06', True):
@ -186,10 +129,7 @@ class DefaultDialect(interfaces.Dialect):
self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
self.identifier_preparer = self.preparer(self)
self.type_compiler = self.type_compiler(self)
if supports_right_nested_joins is not None:
self.supports_right_nested_joins = supports_right_nested_joins
if supports_native_boolean is not None:
self.supports_native_boolean = supports_native_boolean

self.case_sensitive = case_sensitive

if label_length and label_length > self.max_identifier_length:
@ -212,8 +152,6 @@ class DefaultDialect(interfaces.Dialect):
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)

@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@ -246,10 +184,6 @@ class DefaultDialect(interfaces.Dialect):

self.returns_unicode_strings = self._check_unicode_returns(connection)

if self.description_encoding is not None and \
self._check_unicode_description(connection):
self._description_decoder = self.description_encoding = None

self.do_rollback(connection.connection)

def on_connect(self):
@ -266,78 +200,50 @@ class DefaultDialect(interfaces.Dialect):
"""
return None

def _check_unicode_returns(self, connection, additional_tests=None):
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
def _check_unicode_returns(self, connection):
# Py2K
if self.supports_unicode_statements:
cast_to = unicode
else:
cast_to = util.text_type
cast_to = str
# end Py2K
# Py3K
#cast_to = str

if self.positional:
parameters = self.execute_sequence_format()
else:
parameters = {}

def check_unicode(test):
statement = cast_to(expression.select([test]).compile(dialect=self))
def check_unicode(formatstr, type_):
cursor = connection.connection.cursor()
try:
cursor = connection.connection.cursor()
connection._cursor_execute(cursor, statement, parameters)
row = cursor.fetchone()
try:
cursor.execute(
cast_to(
expression.select(
[expression.cast(
expression.literal_column(
"'test %s returns'" % formatstr),
type_)
]).compile(dialect=self)
)
)
row = cursor.fetchone()

return isinstance(row[0], unicode)
except self.dbapi.Error, de:
util.warn("Exception attempting to "
"detect unicode returns: %r" % de)
return False
finally:
cursor.close()
except exc.DBAPIError as de:
# note that _cursor_execute() will have closed the cursor
# if an exception is thrown.
util.warn("Exception attempting to "
"detect unicode returns: %r" % de)
return False
else:
return isinstance(row[0], util.text_type)

tests = [
# detect plain VARCHAR
expression.cast(
expression.literal_column("'test plain returns'"),
sqltypes.VARCHAR(60)
),
# detect if there's an NVARCHAR type with different behavior available
expression.cast(
expression.literal_column("'test unicode returns'"),
sqltypes.Unicode(60)
),
]
# detect plain VARCHAR
unicode_for_varchar = check_unicode("plain", sqltypes.VARCHAR(60))

if additional_tests:
tests += additional_tests
# detect if there's an NVARCHAR type with different behavior available
unicode_for_unicode = check_unicode("unicode", sqltypes.Unicode(60))

results = set([check_unicode(test) for test in tests])

if results.issuperset([True, False]):
if unicode_for_unicode and not unicode_for_varchar:
return "conditional"
else:
return results == set([True])

def _check_unicode_description(self, connection):
# all DBAPIs on Py2K return cursor.description as encoded,
# until pypy2.1beta2 with sqlite, so let's just check it -
# it's likely others will start doing this too in Py2k.

if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type

cursor = connection.connection.cursor()
try:
cursor.execute(
cast_to(
expression.select([
expression.literal_column("'x'").label("some_label")
]).compile(dialect=self)
)
)
return isinstance(cursor.description[0][0], util.text_type)
finally:
cursor.close()
return unicode_for_varchar
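Both versions of the probe boil down to the same idea: run a round-trip SELECT of a casted string literal and inspect the Python type of the result. A bare-bones Python 2 sketch (the function name and SQL text are illustrative only)::

    def probe_unicode_returns(dbapi_conn):
        # True if the driver hands back unicode for a plain VARCHAR cast
        cursor = dbapi_conn.cursor()
        try:
            cursor.execute(
                "SELECT CAST('test plain returns' AS VARCHAR(60))")
            row = cursor.fetchone()
            return isinstance(row[0], unicode)  # Python 2; str on Python 3
        finally:
            cursor.close()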
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
@ -350,7 +256,8 @@ class DefaultDialect(interfaces.Dialect):
"""
return sqltypes.adapt_type(typeobj, self.colspecs)

def reflecttable(self, connection, table, include_columns, exclude_columns):
def reflecttable(self, connection, table, include_columns,
exclude_columns=None):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)

@ -380,24 +287,6 @@ class DefaultDialect(interfaces.Dialect):
opts.update(url.query)
return [[], opts]

def set_engine_execution_options(self, engine, opts):
if 'isolation_level' in opts:
isolation_level = opts['isolation_level']
@event.listens_for(engine, "engine_connect")
def set_isolation(connection, branch):
if not branch:
self._set_connection_isolation(connection, isolation_level)

def set_connection_execution_options(self, connection, opts):
if 'isolation_level' in opts:
self._set_connection_isolation(connection, opts['isolation_level'])

def _set_connection_isolation(self, connection, level):
self.set_isolation_level(connection.connection, level)
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)

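From the application side, the 0.9 hooks removed above are reached through execution_options(); a sketch, assuming a backend that supports SERIALIZABLE (e.g. PostgreSQL via psycopg2)::

    from sqlalchemy import create_engine

    engine = create_engine('postgresql://scott:tiger@localhost/test')

    conn = engine.connect().execution_options(
        isolation_level='SERIALIZABLE')
    # _set_connection_isolation() applies the level immediately and appends
    # reset_isolation_level to the connection record's finalize callbacks,
    # so the DBAPI connection is restored before re-entering the pool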
||||
def do_begin(self, dbapi_connection):
|
||||
pass
|
||||
|
||||
@ -458,7 +347,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
statement = None
|
||||
postfetch_cols = None
|
||||
prefetch_cols = None
|
||||
returning_cols = None
|
||||
_is_implicit_returning = False
|
||||
_is_explicit_returning = False
|
||||
|
||||
@ -485,10 +373,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
self.execution_options.update(connection._execution_options)
|
||||
|
||||
if not dialect.supports_unicode_statements:
|
||||
self.unicode_statement = util.text_type(compiled)
|
||||
self.unicode_statement = unicode(compiled)
|
||||
self.statement = dialect._encoder(self.unicode_statement)[0]
|
||||
else:
|
||||
self.statement = self.unicode_statement = util.text_type(compiled)
|
||||
self.statement = self.unicode_statement = unicode(compiled)
|
||||
|
||||
self.cursor = self.create_cursor()
|
||||
self.compiled_parameters = []
|
||||
@ -526,7 +414,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
|
||||
self.result_map = compiled.result_map
|
||||
|
||||
self.unicode_statement = util.text_type(compiled)
|
||||
self.unicode_statement = unicode(compiled)
|
||||
if not dialect.supports_unicode_statements:
|
||||
self.statement = self.unicode_statement.encode(
|
||||
self.dialect.encoding)
|
||||
@ -555,7 +443,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
if self.isinsert or self.isupdate:
|
||||
self.postfetch_cols = self.compiled.postfetch
|
||||
self.prefetch_cols = self.compiled.prefetch
|
||||
self.returning_cols = self.compiled.returning
|
||||
self.__process_defaults()
|
||||
|
||||
processors = compiled._bind_processors
|
||||
@ -632,7 +519,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
self.executemany = len(parameters) > 1
|
||||
|
||||
if not dialect.supports_unicode_statements and \
|
||||
isinstance(statement, util.text_type):
|
||||
isinstance(statement, unicode):
|
||||
self.unicode_statement = statement
|
||||
self.statement = dialect._encoder(statement)[0]
|
||||
else:
|
||||
@ -686,8 +573,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
"""
|
||||
|
||||
conn = self.root_connection
|
||||
if isinstance(stmt, util.text_type) and \
|
||||
not self.dialect.supports_unicode_statements:
|
||||
if isinstance(stmt, unicode) and \
|
||||
not self.dialect.supports_unicode_statements:
|
||||
stmt = self.dialect._encoder(stmt)[0]
|
||||
|
||||
if self.dialect.positional:
|
||||
@ -814,11 +701,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
ipk.append(row[c])
|
||||
|
||||
self.inserted_primary_key = ipk
|
||||
self.returned_defaults = row
|
||||
|
||||
def _fetch_implicit_update_returning(self, resultproxy):
|
||||
row = resultproxy.fetchone()
|
||||
self.returned_defaults = row
|
||||
|
||||
def lastrow_has_defaults(self):
|
||||
return (self.isinsert or self.isupdate) and \
|
||||
@ -852,7 +734,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
inputsizes.append(dbtype)
|
||||
try:
|
||||
self.cursor.setinputsizes(*inputsizes)
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
self.root_connection._handle_dbapi_exception(
|
||||
e, None, None, None, self)
|
||||
else:
|
||||
@ -870,7 +752,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
inputsizes[key] = dbtype
|
||||
try:
|
||||
self.cursor.setinputsizes(**inputsizes)
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
self.root_connection._handle_dbapi_exception(
|
||||
e, None, None, None, self)
|
||||
|
||||
@ -905,8 +787,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
and generate inserted_primary_key collection.
|
||||
"""
|
||||
|
||||
key_getter = self.compiled._key_getters_for_crud_column[2]
|
||||
|
||||
if self.executemany:
|
||||
if len(self.compiled.prefetch):
|
||||
scalar_defaults = {}
|
||||
@ -930,7 +810,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
else:
|
||||
val = self.get_update_default(c)
|
||||
if val is not None:
|
||||
param[key_getter(c)] = val
|
||||
param[c.key] = val
|
||||
del self.current_parameters
|
||||
else:
|
||||
self.current_parameters = compiled_parameters = \
|
||||
@ -943,12 +823,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
||||
val = self.get_update_default(c)
|
||||
|
||||
if val is not None:
|
||||
compiled_parameters[key_getter(c)] = val
|
||||
compiled_parameters[c.key] = val
|
||||
del self.current_parameters
|
||||
|
||||
if self.isinsert:
|
||||
self.inserted_primary_key = [
|
||||
self.compiled_parameters[0].get(key_getter(c), None)
|
||||
self.compiled_parameters[0].get(c.key, None)
|
||||
for c in self.compiled.\
|
||||
statement.table.primary_key
|
||||
]
|
||||
@ -6,10 +6,8 @@

"""Define core interfaces used by the engine system."""

from .. import util, event
from .. import util, event, events

# backwards compat
from ..sql.compiler import Compiled, TypeCompiler

class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
@ -150,9 +148,6 @@ class Dialect(object):

"""

_has_events = False


def create_connect_args(self, url):
"""Build DB-API compatible connection arguments.

@ -189,28 +184,26 @@ class Dialect(object):
The connection passed here is a SQLAlchemy Connection object,
with full capabilities.

The initialize() method of the base dialect should be called via
The initalize() method of the base dialect should be called via
super().

"""

pass

def reflecttable(self, connection, table, include_columns, exclude_columns):
def reflecttable(self, connection, table, include_columns=None):
"""Load table description from the database.

Given a :class:`.Connection` and a
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
properties from the database.
properties from the database. If include_columns (a list or
set) is specified, limit the autoload to the given column
names.

The implementation of this method is provided by
:meth:`.DefaultDialect.reflecttable`, which makes use of
:class:`.Inspector` to retrieve column information.

Dialects should **not** seek to implement this method, and should
instead implement individual schema inspection operations such as
:meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
etc.
The default implementation uses the
:class:`~sqlalchemy.engine.reflection.Inspector` interface to
provide the output, building upon the granular table/column/
constraint etc. methods of :class:`.Dialect`.

"""

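The restored `reflecttable()` signature drops 0.9's `exclude_columns` parameter and again takes an optional `include_columns` only. A hedged sketch of how that limit is normally reached through ``Table`` autoloading rather than by calling the dialect directly (the SQLite URL and table are hypothetical)::

    from sqlalchemy import MetaData, Table, create_engine

    engine = create_engine('sqlite:///example.db')
    meta = MetaData()
    # Only the named columns are reflected; all others are skipped.
    users = Table('users', meta,
                  autoload=True, autoload_with=engine,
                  include_columns=['id', 'name'])
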
@ -253,7 +246,7 @@ class Dialect(object):

Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
instead implement this method directly.

"""

@ -345,7 +338,7 @@ class Dialect(object):

raise NotImplementedError()

def get_unique_constraints(self, connection, table_name, schema=None, **kw):
def get_unique_constraints(self, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.

Given a string `table_name` and an optional string `schema`, return
@ -357,10 +350,6 @@ class Dialect(object):
column_names
list of column names in order

\**kw
other options passed to the dialect's get_unique_constraints() method.

.. versionadded:: 0.9.0

"""

@ -443,7 +432,7 @@ class Dialect(object):
:meth:`.Dialect.do_autocommit`
hook is provided for DBAPIs that need some extra commands emitted
after a commit in order to enter the next transaction, when the
SQLAlchemy :class:`.Connection` is used in its default "autocommit"
SQLAlchemy :class:`.Connection` is used in it's default "autocommit"
mode.

:param dbapi_connection: a DBAPI connection, typically
@ -776,6 +765,110 @@ class ExecutionContext(object):
raise NotImplementedError()


class Compiled(object):
"""Represent a compiled SQL or DDL expression.

The ``__str__`` method of the ``Compiled`` object should produce
the actual text of the statement. ``Compiled`` objects are
specific to their underlying database dialect, and also may
or may not be specific to the columns referenced within a
particular set of bind parameters. In no case should the
``Compiled`` object be dependent on the actual values of those
bind parameters, even though it may reference those values as
defaults.
"""

def __init__(self, dialect, statement, bind=None,
compile_kwargs=util.immutabledict()):
"""Construct a new ``Compiled`` object.

:param dialect: ``Dialect`` to compile against.

:param statement: ``ClauseElement`` to be compiled.

:param bind: Optional Engine or Connection to compile this
statement against.

:param compile_kwargs: additional kwargs that will be
passed to the initial call to :meth:`.Compiled.process`.

.. versionadded:: 0.8

"""

self.dialect = dialect
self.bind = bind
if statement is not None:
self.statement = statement
self.can_execute = statement.supports_execution
self.string = self.process(self.statement, **compile_kwargs)

@util.deprecated("0.7", ":class:`.Compiled` objects now compile "
"within the constructor.")
def compile(self):
"""Produce the internal string representation of this element."""
pass

@property
def sql_compiler(self):
"""Return a Compiled that is capable of processing SQL expressions.

If this compiler is one, it would likely just return 'self'.

"""

raise NotImplementedError()

def process(self, obj, **kwargs):
return obj._compiler_dispatch(self, **kwargs)

def __str__(self):
"""Return the string text of the generated SQL or DDL."""

return self.string or ''

def construct_params(self, params=None):
"""Return the bind params for this compiled object.

:param params: a dict of string/object pairs whose values will
override bind values compiled in to the
statement.
"""

raise NotImplementedError()

@property
def params(self):
"""Return the bind params for this compiled object."""
return self.construct_params()

def execute(self, *multiparams, **params):
"""Execute this compiled object."""

e = self.bind
if e is None:
raise exc.UnboundExecutionError(
"This Compiled object is not bound to any Engine "
"or Connection.")
return e._execute_compiled(self, multiparams, params)

def scalar(self, *multiparams, **params):
"""Execute this compiled object and return the result's
scalar value."""

return self.execute(*multiparams, **params).scalar()


class TypeCompiler(object):
"""Produces DDL specification for TypeEngine objects."""

def __init__(self, dialect):
self.dialect = dialect

def process(self, type_):
return type_._compiler_dispatch(self)


class Connectable(object):
"""Interface for an object which supports execution of SQL constructs.

@ -787,6 +880,8 @@ class Connectable(object):

"""

dispatch = event.dispatcher(events.ConnectionEvents)

def connect(self, **kwargs):
"""Return a :class:`.Connection` object.

@ -815,8 +910,7 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.create`, "
":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
"""Emit CREATE statements for the given schema entity.
"""
"""Emit CREATE statements for the given schema entity."""

raise NotImplementedError()

@ -825,8 +919,7 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.drop`, "
":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
"""Emit DROP statements for the given schema entity.
"""
"""Emit DROP statements for the given schema entity."""

raise NotImplementedError()

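The `Compiled` class added back above carries its SQL string independently of bind values, as its docstring says. A short sketch of that behavior, assuming the 0.8/0.9-era `select([...])` form::

    from sqlalchemy import Column, Integer, MetaData, Table, select

    t = Table('t', MetaData(), Column('x', Integer))
    stmt = select([t]).where(t.c.x == 5)
    compiled = stmt.compile()      # Compiled subclass for the default dialect
    print(compiled)                # SELECT t.x FROM t WHERE t.x = :x_1
    print(compiled.params)         # {'x_1': 5} -- values live in params,
                                   # never in the compiled string itself
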
@ -25,9 +25,9 @@ methods such as get_table_names, get_columns, etc.
"""

from .. import exc, sql
from ..sql import schema as sa_schema
from .. import schema as sa_schema
from .. import util
from ..sql.type_api import TypeEngine
from ..types import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
@ -41,12 +41,8 @@ def cache(fn, self, con, *args, **kw):
return fn(self, con, *args, **kw)
key = (
fn.__name__,
tuple(a for a in args if isinstance(a, util.string_types)),
tuple((k, v) for k, v in kw.items() if
isinstance(v,
util.string_types + util.int_types + (float, )
)
)
tuple(a for a in args if isinstance(a, basestring)),
tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float)))
)
ret = info_cache.get(key)
if ret is None:
@ -169,7 +165,7 @@ class Inspector(object):
database's default schema is
used, else the named schema is searched. If the database does not
support named schemas, behavior is undefined if ``schema`` is not
passed as ``None``. For special quoting, use :class:`.quoted_name`.
passed as ``None``.

:param order_by: Optional, may be the string "foreign_key" to sort
the result on foreign key dependencies.
@ -206,13 +202,6 @@ class Inspector(object):

This currently includes some options that apply to MySQL tables.

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(
@ -224,8 +213,6 @@ class Inspector(object):
"""Return all view names in `schema`.

:param schema: Optional, retrieve names from a non-default schema.
For special quoting, use :class:`.quoted_name`.

"""

return self.dialect.get_view_names(self.bind, schema,
@ -235,8 +222,6 @@ class Inspector(object):
"""Return definition for `view_name`.

:param schema: Optional, retrieve names from a non-default schema.
For special quoting, use :class:`.quoted_name`.

"""

return self.dialect.get_view_definition(
@ -262,14 +247,6 @@ class Inspector(object):

attrs
dict containing optional column attributes

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

"""

col_defs = self.dialect.get_columns(self.bind, table_name, schema,
@ -307,13 +284,6 @@ class Inspector(object):
name
optional name of the primary key constraint.

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
info_cache=self.info_cache,
@ -341,13 +311,6 @@ class Inspector(object):
name
optional name of the foreign key constraint.

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

"""

return self.dialect.get_foreign_keys(self.bind, table_name, schema,
@ -369,13 +332,6 @@ class Inspector(object):
unique
boolean

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

"""

return self.dialect.get_indexes(self.bind, table_name,
@ -394,13 +350,6 @@ class Inspector(object):
column_names
list of column names in order

:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.

:param schema: string schema name; if omitted, uses the default schema
of the database connection. For special quoting,
use :class:`.quoted_name`.

.. versionadded:: 0.8.4

"""
@ -431,36 +380,38 @@ class Inspector(object):
"""
dialect = self.bind.dialect

# table attributes we might need.
reflection_options = dict(
(k, table.kwargs.get(k))
for k in dialect.reflection_options if k in table.kwargs)

schema = table.schema
table_name = table.name

# get table-level arguments that are specifically
# intended for reflection, e.g. oracle_resolve_synonyms.
# these are unconditionally passed to related Table
# objects
reflection_options = dict(
(k, table.dialect_kwargs.get(k))
for k in dialect.reflection_options
if k in table.dialect_kwargs
)

# reflect table options, like mysql_engine
tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
# apply table options
tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
if tbl_opts:
# add additional kwargs to the Table if the dialect
# returned them
table._validate_dialect_kwargs(tbl_opts)
table.kwargs.update(tbl_opts)

if util.py2k:
if isinstance(schema, str):
schema = schema.decode(dialect.encoding)
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
# table.kwargs will need to be passed to each reflection method. Make
# sure keywords are strings.
tblkw = table.kwargs.copy()
for (k, v) in tblkw.items():
del tblkw[k]
tblkw[str(k)] = v

# Py2K
if isinstance(schema, str):
schema = schema.decode(dialect.encoding)
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
# end Py2K

# columns
found_table = False
cols_by_orig_name = {}

for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
for col_d in self.get_columns(table_name, schema, **tblkw):
found_table = True
orig_name = col_d['name']

@ -473,12 +424,12 @@ class Inspector(object):
continue

coltype = col_d['type']

col_kw = dict(
(k, col_d[k])
for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
if k in col_d
)
col_kw = {
'nullable': col_d['nullable'],
}
for k in ('autoincrement', 'quote', 'info', 'key'):
if k in col_d:
col_kw[k] = col_d[k]

colargs = []
if col_d.get('default') is not None:
@ -492,7 +443,7 @@ class Inspector(object):
)

if 'sequence' in col_d:
# TODO: mssql and sybase are using this.
# TODO: mssql, maxdb and sybase are using this.
seq = col_d['sequence']
sequence = sa_schema.Sequence(seq['name'], 1, 1)
if 'start' in seq:
@ -504,29 +455,33 @@ class Inspector(object):
cols_by_orig_name[orig_name] = col = \
sa_schema.Column(name, coltype, *colargs, **col_kw)

if col.key in table.primary_key:
col.primary_key = True
table.append_column(col)

if not found_table:
raise exc.NoSuchTableError(table.name)

pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
# Primary keys
pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
if pk_cons:
pk_cols = [
cols_by_orig_name[pk]
for pk in pk_cons['constrained_columns']
if pk in cols_by_orig_name and pk not in exclude_columns
]
pk_cols += [
pk
for pk in table.primary_key
if pk.key in exclude_columns
]
primary_key_constraint = sa_schema.PrimaryKeyConstraint(
name=pk_cons.get('name'),
*pk_cols
)

# update pk constraint name
table.primary_key.name = pk_cons.get('name')
table.append_constraint(primary_key_constraint)

# tell the PKConstraint to re-initialize
# it's column collection
table.primary_key._reload(pk_cols)

fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
# Foreign keys
fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
for fkey_d in fkeys:
conname = fkey_d['name']
# look for columns by orig name in cols_by_orig_name,
@ -559,14 +514,9 @@ class Inspector(object):
)
for column in referred_columns:
refspec.append(".".join([referred_table, column]))
if 'options' in fkey_d:
options = fkey_d['options']
else:
options = {}
table.append_constraint(
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
conname, link_to_name=True,
**options))
conname, link_to_name=True))
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
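
The `Inspector` methods shown above (get_columns, get_pk_constraint, get_foreign_keys, get_indexes) are the granular interface that `reflecttable()` builds on, and they can also be driven directly. A hedged sketch (the database URL is hypothetical)::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine('sqlite:///example.db')
    insp = reflection.Inspector.from_engine(engine)
    for name in insp.get_table_names():
        print(name, [c['name'] for c in insp.get_columns(name)])
        print(insp.get_pk_constraint(name))
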
@ -8,11 +8,10 @@
and :class:`.RowProxy."""




from .. import exc, util
from ..sql import expression, sqltypes
from itertools import izip
from .. import exc, types, util
from ..sql import expression
import collections
import operator

# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
@ -56,7 +55,7 @@ except ImportError:
return list(self)

def __iter__(self):
for processor, value in zip(self._processors, self._row):
for processor, value in izip(self._processors, self._row):
if processor is None:
yield value
else:
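The `zip` → `izip` substitution is the same Python 2 regression in iterator form: `itertools.izip` pairs items lazily the way the built-in `zip` does on Python 3, whereas Python 2's `zip` would build a full list for every row. A Python 2-only sketch of the pattern used above::

    from itertools import izip

    processors = [None, int, None]     # per-column result processors
    row = ('a', '2', 'c')
    for processor, value in izip(processors, row):
        print(value if processor is None else processor(value))
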
@ -73,7 +72,7 @@ except ImportError:
except TypeError:
if isinstance(key, slice):
l = []
for processor, value in zip(self._processors[key],
for processor, value in izip(self._processors[key],
self._row[key]):
if processor is None:
l.append(value)
@ -94,7 +93,7 @@ except ImportError:
def __getattr__(self, name):
try:
return self[name]
except KeyError as e:
except KeyError, e:
raise AttributeError(e.args[0])


@ -126,28 +125,11 @@ class RowProxy(BaseRowProxy):

__hash__ = None

def _op(self, other, op):
return op(tuple(self), tuple(other)) \
if isinstance(other, RowProxy) \
else op(tuple(self), other)

def __lt__(self, other):
return self._op(other, operator.lt)

def __le__(self, other):
return self._op(other, operator.le)

def __ge__(self, other):
return self._op(other, operator.ge)

def __gt__(self, other):
return self._op(other, operator.gt)

def __eq__(self, other):
return self._op(other, operator.eq)
return other is self or other == tuple(self)

def __ne__(self, other):
return self._op(other, operator.ne)
return not self.__eq__(other)

def __repr__(self):
return repr(tuple(self))
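
The hunk above removes 0.9's full set of ordering comparisons (`__lt__` through `__ne__` via the `_op` helper) and restores 0.8's equality-only behavior. A sketch of the visible difference, using a plain tuple to stand in for a fetched row::

    row = (1, 'alice')

    # Both versions support equality/inequality against a tuple:
    assert row == (1, 'alice')
    assert row != (2, 'bob')

    # 0.9 additionally defined __lt__ etc., so for example
    # sorted(result.fetchall()) worked directly on RowProxy objects;
    # after this downgrade ordering comparisons fall back to default
    # object behavior again.
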
@ -160,7 +142,7 @@ class RowProxy(BaseRowProxy):
def items(self):
"""Return a list of tuples, each tuple containing a key/value pair."""
# TODO: no coverage here
return [(key, self[key]) for key in self.keys()]
return [(key, self[key]) for key in self.iterkeys()]

def keys(self):
"""Return the list of keys as strings represented by this RowProxy."""
@ -223,10 +205,10 @@ class ResultMetaData(object):
else colname.lower()]
except KeyError:
name, obj, type_ = \
colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
colname, None, typemap.get(coltype, types.NULLTYPE)
else:
name, obj, type_ = \
colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
colname, None, typemap.get(coltype, types.NULLTYPE)

processor = context.get_result_processor(type_, colname, coltype)

@ -292,7 +274,7 @@ class ResultMetaData(object):
def _key_fallback(self, key, raiseerr=True):
map = self._keymap
result = None
if isinstance(key, util.string_types):
if isinstance(key, basestring):
result = map.get(key if self.case_sensitive else key.lower())
# fallback for targeting a ColumnElement to a textual expression
# this is a rare use case which only occurs when matching text()
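
The `util.string_types` → `basestring` substitutions here (along with `items()`/`iteritems()` and `range`/`xrange`) reverse 0.9's Python 2/3 compatibility layer in favor of plain Python 2 builtins. A simplified sketch of what the removed shim amounts to (names mirror 0.9's `sqlalchemy.util.compat`, reduced for illustration)::

    import sys

    if sys.version_info >= (3,):
        string_types = (str,)
    else:
        string_types = (basestring,)    # Python 2 builtin

    def is_stringy(key):
        # 0.9 spelling: isinstance(key, util.string_types)
        # 0.8 spelling: isinstance(key, basestring)
        return isinstance(key, string_types)
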
@ -346,8 +328,8 @@ class ResultMetaData(object):
return {
'_pickled_keymap': dict(
(key, index)
for key, (processor, obj, index) in self._keymap.items()
if isinstance(key, util.string_types + util.int_types)
for key, (processor, obj, index) in self._keymap.iteritems()
if isinstance(key, (basestring, int))
),
'keys': self.keys,
"case_sensitive": self.case_sensitive,
@ -356,9 +338,9 @@ class ResultMetaData(object):
def __setstate__(self, state):
# the row has been processed at pickling time so we don't need any
# processor anymore
self._processors = [None for _ in range(len(state['keys']))]
self._processors = [None for _ in xrange(len(state['keys']))]
self._keymap = keymap = {}
for key, index in state['_pickled_keymap'].items():
for key, index in state['_pickled_keymap'].iteritems():
# not preserving "obj" here, unfortunately our
# proxy comparison fails with the unpickle
keymap[key] = (None, None, index)
@ -458,7 +440,7 @@ class ResultProxy(object):
"""
try:
return self.context.rowcount
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None, self.cursor, self.context)

@ -480,7 +462,7 @@ class ResultProxy(object):
"""
try:
return self._saved_cursor.lastrowid
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None,
self._saved_cursor, self.context)
@ -639,24 +621,6 @@ class ResultProxy(object):
else:
return self.context.compiled_parameters[0]

@property
def returned_defaults(self):
"""Return the values of default columns that were fetched using
the :meth:`.ValuesBase.return_defaults` feature.

The value is an instance of :class:`.RowProxy`, or ``None``
if :meth:`.ValuesBase.return_defaults` was not used or if the
backend does not support RETURNING.

.. versionadded:: 0.9.0

.. seealso::

:meth:`.ValuesBase.return_defaults`

"""
return self.context.returned_defaults

def lastrow_has_defaults(self):
"""Return ``lastrow_has_defaults()`` from the underlying
:class:`.ExecutionContext`.
@ -782,7 +746,7 @@ class ResultProxy(object):
l = self.process_rows(self._fetchall_impl())
self.close()
return l
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@ -801,7 +765,7 @@ class ResultProxy(object):
if len(l) == 0:
self.close()
return l
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@ -820,7 +784,7 @@ class ResultProxy(object):
else:
self.close()
return None
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@ -836,7 +800,7 @@ class ResultProxy(object):

try:
row = self._fetchone_impl()
except Exception as e:
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@ -1002,9 +966,9 @@ class BufferedColumnResultProxy(ResultProxy):
# constructed.
metadata._orig_processors = metadata._processors
# replace the all type processors by None processors.
metadata._processors = [None for _ in range(len(metadata.keys))]
metadata._processors = [None for _ in xrange(len(metadata.keys))]
keymap = {}
for k, (func, obj, index) in metadata._keymap.items():
for k, (func, obj, index) in metadata._keymap.iteritems():
keymap[k] = (None, obj, index)
self._metadata._keymap = keymap

@ -1025,7 +989,7 @@ class BufferedColumnResultProxy(ResultProxy):
if size is None:
return self.fetchall()
l = []
for i in range(size):
for i in xrange(size):
row = self.fetchone()
if row is None:
break

@ -49,27 +49,18 @@ class DefaultEngineStrategy(EngineStrategy):

dialect_cls = u.get_dialect()

if kwargs.pop('_coerce_config', False):
def pop_kwarg(key, default=None):
value = kwargs.pop(key, default)
if key in dialect_cls.engine_config_types:
value = dialect_cls.engine_config_types[key](value)
return value
else:
pop_kwarg = kwargs.pop

dialect_args = {}
# consume dialect arguments from kwargs
for k in util.get_cls_kwargs(dialect_cls):
if k in kwargs:
dialect_args[k] = pop_kwarg(k)
dialect_args[k] = kwargs.pop(k)

dbapi = kwargs.pop('module', None)
if dbapi is None:
dbapi_args = {}
for k in util.get_func_kwargs(dialect_cls.dbapi):
if k in kwargs:
dbapi_args[k] = pop_kwarg(k)
dbapi_args[k] = kwargs.pop(k)
dbapi = dialect_cls.dbapi(**dbapi_args)

dialect_args['dbapi'] = dbapi
@ -79,26 +70,32 @@ class DefaultEngineStrategy(EngineStrategy):

# assemble connection arguments
(cargs, cparams) = dialect.create_connect_args(u)
cparams.update(pop_kwarg('connect_args', {}))
cparams.update(kwargs.pop('connect_args', {}))

# look for existing pool or create
pool = pop_kwarg('pool', None)
pool = kwargs.pop('pool', None)
if pool is None:
def connect():
try:
return dialect.connect(*cargs, **cparams)
except dialect.dbapi.Error as e:
except dialect.dbapi.Error, e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
exc.DBAPIError.instance(None, None,
e, dialect.dbapi.Error,
connection_invalidated=invalidated
)
)
# Py3K
#raise exc.DBAPIError.instance(None, None,
# e, dialect.dbapi.Error,
# connection_invalidated=invalidated
#) from e
# Py2K
import sys
raise exc.DBAPIError.instance(
None, None, e, dialect.dbapi.Error,
connection_invalidated=invalidated
), None, sys.exc_info()[2]
# end Py2K

creator = pop_kwarg('creator', connect)
creator = kwargs.pop('creator', connect)

poolclass = pop_kwarg('poolclass', None)
poolclass = kwargs.pop('poolclass', None)
if poolclass is None:
poolclass = dialect_cls.get_pool_class(u)
pool_args = {}
@ -115,7 +112,7 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(poolclass):
tk = translate.get(k, k)
if tk in kwargs:
pool_args[k] = pop_kwarg(tk)
pool_args[k] = kwargs.pop(tk)
pool = poolclass(creator, **pool_args)
else:
if isinstance(pool, poollib._DBProxy):
@ -128,7 +125,7 @@ class DefaultEngineStrategy(EngineStrategy):
engine_args = {}
for k in util.get_cls_kwargs(engineclass):
if k in kwargs:
engine_args[k] = pop_kwarg(k)
engine_args[k] = kwargs.pop(k)

_initialize = kwargs.pop('_initialize', True)

@ -158,12 +155,18 @@ class DefaultEngineStrategy(EngineStrategy):
event.listen(pool, 'first_connect', on_connect)
event.listen(pool, 'connect', on_connect)

@util.only_once
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
_has_events=False)
c = base.Connection(engine, connection=dbapi_connection)

# TODO: removing this allows the on connect activities
# to generate events. tests currently assume these aren't
# sent. do we want users to get all the initial connect
# activities as events ?
c._has_events = False

dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
event.listen(pool, 'first_connect', first_connect)

return engine

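The final hunk trades 0.9's `event.listen(pool, 'first_connect', first_connect, once=True)` for 0.8's `@util.only_once` decorator; both arrange for `dialect.initialize()` to run against only the first pooled connection. A self-contained sketch of the decorator idiom, where `only_once` is a simplified stand-in for `sqlalchemy.util.only_once`::

    def only_once(fn):
        # wrap fn so that calls after the first become no-ops
        state = {'done': False}
        def go(*args, **kw):
            if not state['done']:
                state['done'] = True
                return fn(*args, **kw)
        return go

    @only_once
    def first_connect():
        print('initializing dialect')

    first_connect()   # runs
    first_connect()   # no-op, like once=True in 0.9
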
@ -14,9 +14,9 @@ be used directly and is also accepted directly by ``create_engine()``.
"""

import re
import urllib
from .. import exc, util
from . import Dialect
from ..dialects import registry


class URL(object):
@ -65,10 +65,10 @@ class URL(object):
def __to_string__(self, hide_password=True):
s = self.drivername + "://"
if self.username is not None:
s += _rfc_1738_quote(self.username)
s += self.username
if self.password is not None:
s += ':' + ('***' if hide_password
else _rfc_1738_quote(self.password))
else urllib.quote_plus(self.password))
s += "@"
if self.host is not None:
if ':' in self.host:
@ -80,7 +80,7 @@ class URL(object):
if self.database is not None:
s += '/' + self.database
if self.query:
keys = list(self.query)
keys = self.query.keys()
keys.sort()
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
return s
@ -113,6 +113,7 @@ class URL(object):
name = self.drivername
else:
name = self.drivername.replace('+', '.')
from sqlalchemy.dialects import registry
cls = registry.load(name)
# check for legacy dialects that
# would return a module with 'dialect' as the
@ -159,7 +160,7 @@ def make_url(name_or_url):
existing URL object is passed, just returns the object.
"""

if isinstance(name_or_url, util.string_types):
if isinstance(name_or_url, basestring):
return _parse_rfc1738_args(name_or_url)
else:
return name_or_url
@ -170,7 +171,7 @@ def _parse_rfc1738_args(name):
(?P<name>[\w\+]+)://
(?:
(?P<username>[^:/]*)
(?::(?P<password>.*))?
(?::(?P<password>[^/]*))?
@)?
(?:
(?:
@ -189,17 +190,17 @@ def _parse_rfc1738_args(name):
tokens = components['database'].split('?', 2)
components['database'] = tokens[0]
query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
if util.py2k and query is not None:
# Py2K
if query is not None:
query = dict((k.encode('ascii'), query[k]) for k in query)
# end Py2K
else:
query = None
components['query'] = query

if components['username'] is not None:
components['username'] = _rfc_1738_unquote(components['username'])

if components['password'] is not None:
components['password'] = _rfc_1738_unquote(components['password'])
components['password'] = \
urllib.unquote_plus(components['password'])

ipv4host = components.pop('ipv4host')
ipv6host = components.pop('ipv6host')
@ -211,12 +212,6 @@ def _parse_rfc1738_args(name):
"Could not parse rfc1738 URL from string '%s'" % name)


def _rfc_1738_quote(text):
return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)

def _rfc_1738_unquote(text):
return util.unquote(text)

def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
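
These url.py hunks drop 0.9's `_rfc_1738_quote`/`_rfc_1738_unquote` helpers (which escape only `:`, `@` and `/`) and return to `urllib.quote_plus`/`unquote_plus` for the password alone, with usernames emitted unescaped again. A Python 2 sketch of the round trip under the restored behavior (the credentials and URL are hypothetical)::

    import urllib

    password = 'p@ss:word/1'
    escaped = urllib.quote_plus(password)        # 'p%40ss%3Aword%2F1'
    url = 'postgresql://scott:%s@localhost/test' % escaped
    assert urllib.unquote_plus(escaped) == password
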
@ -6,6 +6,28 @@

from .. import util


def _coerce_config(configuration, prefix):
"""Convert configuration values to expected types."""

options = dict((key[len(prefix):], configuration[key])
for key in configuration
if key.startswith(prefix))
for option, type_ in (
('convert_unicode', util.bool_or_str('force')),
('pool_timeout', int),
('echo', util.bool_or_str('debug')),
('echo_pool', util.bool_or_str('debug')),
('pool_recycle', int),
('pool_size', int),
('max_overflow', int),
('pool_threadlocal', bool),
('use_native_unicode', bool),
):
util.coerce_kw_type(options, option, type_)
return options

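The restored `_coerce_config()` is, to the best of our reading, what 0.8's `engine_from_config()` relies on to turn flat string settings (for example from an .ini file) into correctly typed `create_engine()` arguments. A hedged usage sketch (the URL and values are hypothetical)::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite:///example.db',
        'sqlalchemy.echo': 'true',           # coerced via bool_or_str('debug')
        'sqlalchemy.pool_recycle': '3600',   # coerced to int
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
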
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.

lib/sqlalchemy/event.py (new file, 566 lines)
@ -0,0 +1,566 @@
# sqlalchemy/event.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Base event API."""
from __future__ import with_statement

from . import util, exc
from .util import threading
from itertools import chain
import weakref

CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')


def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.

e.g.::

from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint

def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
event.listen(
UniqueConstraint,
"after_parent_attach",
unique_constraint_name)

"""

for evt_cls in _registrars[identifier]:
tgt = evt_cls._accept_with(target)
if tgt is not None:
tgt.dispatch._listen(tgt, identifier, fn, *args, **kw)
return
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
(identifier, target))


def listens_for(target, identifier, *args, **kw):
"""Decorate a function as a listener for the given target + identifier.

e.g.::

from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint

@event.listens_for(UniqueConstraint, "after_parent_attach")
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
"""
def decorate(fn):
listen(target, identifier, fn, *args, **kw)
return fn
return decorate


def remove(target, identifier, fn):
"""Remove an event listener.

Note that some event removals, particularly for those event dispatchers
which create wrapper functions and secondary even listeners, may not yet
be supported.

"""
for evt_cls in _registrars[identifier]:
for tgt in evt_cls._accept_with(target):
tgt.dispatch._remove(identifier, tgt, fn)
return

_registrars = util.defaultdict(list)


def _is_event_name(name):
return not name.startswith('_') and name != 'dispatch'


class _UnpickleDispatch(object):
"""Serializable callable that re-generates an instance of
:class:`_Dispatch` given a particular :class:`.Events` subclass.

"""
def __call__(self, _parent_cls):
for cls in _parent_cls.__mro__:
if 'dispatch' in cls.__dict__:
return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
else:
raise AttributeError("No class with a 'dispatch' member present.")


class _Dispatch(object):
"""Mirror the event listening definitions of an Events class with
listener collections.

Classes which define a "dispatch" member will return a
non-instantiated :class:`._Dispatch` subclass when the member
is accessed at the class level. When the "dispatch" member is
accessed at the instance level of its owner, an instance
of the :class:`._Dispatch` class is returned.

A :class:`._Dispatch` class is generated for each :class:`.Events`
class defined, by the :func:`._create_dispatcher_class` function.
The original :class:`.Events` classes remain untouched.
This decouples the construction of :class:`.Events` subclasses from
the implementation used by the event internals, and allows
inspecting tools like Sphinx to work in an unsurprising
way against the public API.

"""

def __init__(self, _parent_cls):
self._parent_cls = _parent_cls

def _join(self, other):
"""Create a 'join' of this :class:`._Dispatch` and another.

This new dispatcher will dispatch events to both
:class:`._Dispatch` objects.

Once constructed, the joined dispatch will respond to new events
added to this dispatcher, but may not be aware of events
added to the other dispatcher after creation of the join. This is
currently for performance reasons so that both dispatchers need
not be "evaluated" fully on each call.

"""
if '_joined_dispatch_cls' not in self.__class__.__dict__:
cls = type(
"Joined%s" % self.__class__.__name__,
(_JoinedDispatcher, self.__class__), {}
)
for ls in _event_descriptors(self):
setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))

self.__class__._joined_dispatch_cls = cls
return self._joined_dispatch_cls(self, other)

def __reduce__(self):
return _UnpickleDispatch(), (self._parent_cls, )

def _update(self, other, only_propagate=True):
"""Populate from the listeners in another :class:`_Dispatch`
object."""

for ls in _event_descriptors(other):
getattr(self, ls.name).\
for_modify(self)._update(ls, only_propagate=only_propagate)

@util.hybridmethod
def _clear(self):
for attr in dir(self):
if _is_event_name(attr):
getattr(self, attr).for_modify(self).clear()


def _event_descriptors(target):
return [getattr(target, k) for k in dir(target) if _is_event_name(k)]


class _EventMeta(type):
"""Intercept new Event subclasses and create
associated _Dispatch classes."""

def __init__(cls, classname, bases, dict_):
_create_dispatcher_class(cls, classname, bases, dict_)
return type.__init__(cls, classname, bases, dict_)


def _create_dispatcher_class(cls, classname, bases, dict_):
"""Create a :class:`._Dispatch` class corresponding to an
:class:`.Events` class."""

# there's all kinds of ways to do this,
# i.e. make a Dispatch class that shares the '_listen' method
# of the Event class, this is the straight monkeypatch.
dispatch_base = getattr(cls, 'dispatch', _Dispatch)
cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
(dispatch_base, ), {})
dispatch_cls._listen = cls._listen

for k in dict_:
if _is_event_name(k):
setattr(dispatch_cls, k, _DispatchDescriptor(dict_[k]))
_registrars[k].append(cls)


def _remove_dispatcher(cls):
for k in dir(cls):
if _is_event_name(k):
_registrars[k].remove(cls)
if not _registrars[k]:
del _registrars[k]


class Events(object):
"""Define event listening functions for a particular target type."""

__metaclass__ = _EventMeta

@classmethod
def _accept_with(cls, target):
# Mapper, ClassManager, Session override this to
# also accept classes, scoped_sessions, sessionmakers, etc.
if hasattr(target, 'dispatch') and (
isinstance(target.dispatch, cls.dispatch) or \
isinstance(target.dispatch, type) and \
issubclass(target.dispatch, cls.dispatch)
):
return target
else:
return None

@classmethod
def _listen(cls, target, identifier, fn, propagate=False, insert=False):
if insert:
getattr(target.dispatch, identifier).\
for_modify(target.dispatch).insert(fn, target, propagate)
else:
getattr(target.dispatch, identifier).\
for_modify(target.dispatch).append(fn, target, propagate)

@classmethod
def _remove(cls, target, identifier, fn):
getattr(target.dispatch, identifier).remove(fn, target)

@classmethod
def _clear(cls):
cls.dispatch._clear()


class _DispatchDescriptor(object):
"""Class-level attributes on :class:`._Dispatch` classes."""

def __init__(self, fn):
self.__name__ = fn.__name__
self.__doc__ = fn.__doc__
self._clslevel = weakref.WeakKeyDictionary()
self._empty_listeners = weakref.WeakKeyDictionary()

def _contains(self, cls, evt):
return cls in self._clslevel and \
evt in self._clslevel[cls]

def insert(self, obj, target, propagate):
assert isinstance(target, type), \
"Class-level Event targets must be classes."
stack = [target]
while stack:
cls = stack.pop(0)
stack.extend(cls.__subclasses__())
if cls is not target and cls not in self._clslevel:
self.update_subclass(cls)
else:
if cls not in self._clslevel:
self._clslevel[cls] = []
self._clslevel[cls].insert(0, obj)

def append(self, obj, target, propagate):
assert isinstance(target, type), \
"Class-level Event targets must be classes."

stack = [target]
while stack:
cls = stack.pop(0)
stack.extend(cls.__subclasses__())
if cls is not target and cls not in self._clslevel:
self.update_subclass(cls)
else:
if cls not in self._clslevel:
self._clslevel[cls] = []
self._clslevel[cls].append(obj)

def update_subclass(self, target):
if target not in self._clslevel:
self._clslevel[target] = []
clslevel = self._clslevel[target]
for cls in target.__mro__[1:]:
if cls in self._clslevel:
clslevel.extend([
fn for fn
in self._clslevel[cls]
if fn not in clslevel
])

def remove(self, obj, target):
stack = [target]
while stack:
cls = stack.pop(0)
stack.extend(cls.__subclasses__())
if cls in self._clslevel:
self._clslevel[cls].remove(obj)

def clear(self):
"""Clear all class level listeners"""

for dispatcher in self._clslevel.values():
dispatcher[:] = []

def for_modify(self, obj):
"""Return an event collection which can be modified.

For _DispatchDescriptor at the class level of
a dispatcher, this returns self.

"""
return self

def __get__(self, obj, cls):
if obj is None:
return self
elif obj._parent_cls in self._empty_listeners:
ret = self._empty_listeners[obj._parent_cls]
else:
self._empty_listeners[obj._parent_cls] = ret = \
_EmptyListener(self, obj._parent_cls)
# assigning it to __dict__ means
# memoized for fast re-access. but more memory.
obj.__dict__[self.__name__] = ret
return ret


class _EmptyListener(object):
"""Serves as a class-level interface to the events
served by a _DispatchDescriptor, when there are no
instance-level events present.

Is replaced by _ListenerCollection when instance-level
events are added.

"""
def __init__(self, parent, target_cls):
if target_cls not in parent._clslevel:
parent.update_subclass(target_cls)
self.parent = parent
self.parent_listeners = parent._clslevel[target_cls]
self.name = parent.__name__
self.propagate = frozenset()
self.listeners = ()

def for_modify(self, obj):
"""Return an event collection which can be modified.

For _EmptyListener at the instance level of
a dispatcher, this generates a new
_ListenerCollection, applies it to the instance,
and returns it.

"""
result = _ListenerCollection(self.parent, obj._parent_cls)
if obj.__dict__[self.name] is self:
obj.__dict__[self.name] = result
return result

def _needs_modify(self, *args, **kw):
raise NotImplementedError("need to call for_modify()")

exec_once = insert = append = remove = clear = _needs_modify

def __call__(self, *args, **kw):
"""Execute this event."""

for fn in self.parent_listeners:
fn(*args, **kw)

def __len__(self):
return len(self.parent_listeners)

def __iter__(self):
return iter(self.parent_listeners)

def __nonzero__(self):
return bool(self.parent_listeners)


class _CompoundListener(object):
_exec_once = False

@util.memoized_property
def _exec_once_mutex(self):
return threading.Lock()

def exec_once(self, *args, **kw):
"""Execute this event, but only if it has not been
executed already for this collection."""

if not self._exec_once:
with self._exec_once_mutex:
if not self._exec_once:
try:
self(*args, **kw)
finally:
self._exec_once = True


# I'm not entirely thrilled about the overhead here,
# but this allows class-level listeners to be added
# at any point.
#
# In the absense of instance-level listeners,
# we stay with the _EmptyListener object when called
# at the instance level.

def __call__(self, *args, **kw):
"""Execute this event."""

for fn in self.parent_listeners:
fn(*args, **kw)
for fn in self.listeners:
fn(*args, **kw)

def __len__(self):
return len(self.parent_listeners) + len(self.listeners)

def __iter__(self):
return chain(self.parent_listeners, self.listeners)

def __nonzero__(self):
return bool(self.listeners or self.parent_listeners)


class _ListenerCollection(_CompoundListener):
"""Instance-level attributes on instances of :class:`._Dispatch`.

Represents a collection of listeners.

As of 0.7.9, _ListenerCollection is only first
created via the _EmptyListener.for_modify() method.

"""

def __init__(self, parent, target_cls):
if target_cls not in parent._clslevel:
parent.update_subclass(target_cls)
self.parent_listeners = parent._clslevel[target_cls]
self.name = parent.__name__
self.listeners = []
self.propagate = set()

def for_modify(self, obj):
"""Return an event collection which can be modified.

For _ListenerCollection at the instance level of
a dispatcher, this returns self.

"""
return self

def _update(self, other, only_propagate=True):
"""Populate from the listeners in another :class:`_Dispatch`
object."""

existing_listeners = self.listeners
existing_listener_set = set(existing_listeners)
self.propagate.update(other.propagate)
existing_listeners.extend([l for l
in other.listeners
if l not in existing_listener_set
and not only_propagate or l in self.propagate
])

def insert(self, obj, target, propagate):
if obj not in self.listeners:
self.listeners.insert(0, obj)
if propagate:
self.propagate.add(obj)

def append(self, obj, target, propagate):
if obj not in self.listeners:
self.listeners.append(obj)
if propagate:
self.propagate.add(obj)

def remove(self, obj, target):
if obj in self.listeners:
self.listeners.remove(obj)
self.propagate.discard(obj)

def clear(self):
self.listeners[:] = []
self.propagate.clear()


class _JoinedDispatcher(object):
"""Represent a connection between two _Dispatch objects."""

def __init__(self, local, parent):
self.local = local
self.parent = parent
self._parent_cls = local._parent_cls


class _JoinedDispatchDescriptor(object):
def __init__(self, name):
self.name = name

def __get__(self, obj, cls):
if obj is None:
return self
else:
obj.__dict__[self.name] = ret = _JoinedListener(
obj.parent, self.name,
getattr(obj.local, self.name)
)
return ret


class _JoinedListener(_CompoundListener):
_exec_once = False

def __init__(self, parent, name, local):
self.parent = parent
self.name = name
self.local = local
self.parent_listeners = self.local

# fix .listeners for the parent. This means
# new events added to the parent won't be picked
# up here. Alternatively, the listeners can
# be via @property to just return getattr(self.parent, self.name)
# each time. less performant.
self.listeners = list(getattr(self.parent, self.name))

def for_modify(self, obj):
self.local = self.parent_listeners = self.local.for_modify(obj)
return self

def insert(self, obj, target, propagate):
self.local.insert(obj, target, propagate)

def append(self, obj, target, propagate):
self.local.append(obj, target, propagate)

def remove(self, obj, target):
self.local.remove(obj, target)

def clear(self):
raise NotImplementedError()


class dispatcher(object):
"""Descriptor used by target classes to
deliver the _Dispatch class at the class level
and produce new _Dispatch instances for target
instances.

"""
def __init__(self, events):
self.dispatch_cls = events.dispatch
self.events = events

def __get__(self, obj, cls):
if obj is None:
return self.dispatch_cls
obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
return disp
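
The restored monolithic event.py above replaces the 0.9 `event/` package whose modules are deleted below; the public `listen()`/`listens_for()`/`remove()` surface stays the same. A sketch of the dispatch flow end to end (the database URL is hypothetical)::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite:///example.db')

    def on_connect(dbapi_connection, connection_record):
        print('new DBAPI connection:', dbapi_connection)

    # Registers through _registrars -> Events._listen -> _DispatchDescriptor
    event.listen(engine.pool, 'connect', on_connect)

    # Acquiring a connection fires engine.pool.dispatch.connect(...),
    # which iterates the listener collections defined above.
    engine.connect()
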
@ -1,131 +0,0 @@
# event/api.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Public API functions for the event system.

"""
from __future__ import absolute_import

from .. import util, exc
from .base import _registrars
from .registry import _EventKey

CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')


def _event_key(target, identifier, fn):
for evt_cls in _registrars[identifier]:
tgt = evt_cls._accept_with(target)
if tgt is not None:
return _EventKey(target, identifier, fn, tgt)
else:
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
(identifier, target))

def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.

e.g.::

from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint

def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
event.listen(
UniqueConstraint,
"after_parent_attach",
unique_constraint_name)


A given function can also be invoked for only the first invocation
of the event using the ``once`` argument::

def on_config():
do_config()

event.listen(Mapper, "before_configure", on_config, once=True)

.. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen`
and :func:`.event.listens_for`.

"""

_event_key(target, identifier, fn).listen(*args, **kw)


def listens_for(target, identifier, *args, **kw):
"""Decorate a function as a listener for the given target + identifier.

e.g.::

from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint

@event.listens_for(UniqueConstraint, "after_parent_attach")
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)

A given function can also be invoked for only the first invocation
of the event using the ``once`` argument::

@event.listens_for(Mapper, "before_configure", once=True)
def on_config():
do_config()


.. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen`
and :func:`.event.listens_for`.

"""
def decorate(fn):
listen(target, identifier, fn, *args, **kw)
return fn
return decorate


def remove(target, identifier, fn):
"""Remove an event listener.

The arguments here should match exactly those which were sent to
:func:`.listen`; all the event registration which proceeded as a result
of this call will be reverted by calling :func:`.remove` with the same
arguments.

e.g.::

# if a function was registered like this...
@event.listens_for(SomeMappedClass, "before_insert", propagate=True)
def my_listener_function(*arg):
pass

# ... it's removed like this
event.remove(SomeMappedClass, "before_insert", my_listener_function)

Above, the listener function associated with ``SomeMappedClass`` was also
propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` function
will revert all of these operations.

.. versionadded:: 0.9.0

"""
_event_key(target, identifier, fn).remove()

def contains(target, identifier, fn):
"""Return True if the given target/ident/fn is set up to listen.

.. versionadded:: 0.9.0

"""

return _event_key(target, identifier, fn).contains()
@ -1,386 +0,0 @@
# event/attr.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Attribute implementation for _Dispatch classes.

The various listener targets for a particular event class are represented
as attributes, which refer to collections of listeners to be fired off.
These collections can exist at the class level as well as at the instance
level.  An event is fired off using code like this::

    some_object.dispatch.first_connect(arg1, arg2)

Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
``first_connect`` is typically an instance of ``_ListenerCollection``
if event listeners are present, or ``_EmptyListener`` if none are present.

The attribute mechanics here spend effort trying to ensure listener functions
are available with a minimum of function call overhead, that unnecessary
objects aren't created (i.e. many empty per-instance listener collections),
as well as that everything is garbage collectable when owning references are
lost.  Other features such as "propagation" of listener functions across
many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
as well as support for subclass propagation (e.g. events assigned to
``Pool`` vs. ``QueuePool``) are all implemented here.

"""
|
||||
|
||||
from __future__ import absolute_import, with_statement
|
||||
|
||||
from .. import util
|
||||
from ..util import threading
|
||||
from . import registry
|
||||
from . import legacy
|
||||
from itertools import chain
|
||||
import weakref
|
||||
|
||||
|
||||
class RefCollection(object):
|
||||
@util.memoized_property
|
||||
def ref(self):
|
||||
return weakref.ref(self, registry._collection_gced)
|
||||
|
||||
class _DispatchDescriptor(RefCollection):
|
||||
"""Class-level attributes on :class:`._Dispatch` classes."""
|
||||
|
||||
def __init__(self, parent_dispatch_cls, fn):
|
||||
self.__name__ = fn.__name__
|
||||
argspec = util.inspect_getargspec(fn)
|
||||
self.arg_names = argspec.args[1:]
|
||||
self.has_kw = bool(argspec.keywords)
|
||||
self.legacy_signatures = list(reversed(
|
||||
sorted(
|
||||
getattr(fn, '_legacy_signatures', []),
|
||||
key=lambda s: s[0]
|
||||
)
|
||||
))
|
||||
self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
|
||||
self, parent_dispatch_cls, fn)
|
||||
|
||||
self._clslevel = weakref.WeakKeyDictionary()
|
||||
self._empty_listeners = weakref.WeakKeyDictionary()
|
||||
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
if named:
|
||||
fn = self._wrap_fn_for_kw(fn)
|
||||
if self.legacy_signatures:
|
||||
try:
|
||||
argspec = util.get_callable_argspec(fn, no_self=True)
|
||||
except TypeError:
|
||||
pass
|
||||
else:
|
||||
fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
|
||||
return fn
|
||||
|
||||
def _wrap_fn_for_kw(self, fn):
|
||||
def wrap_kw(*args, **kw):
|
||||
argdict = dict(zip(self.arg_names, args))
|
||||
argdict.update(kw)
|
||||
return fn(**argdict)
|
||||
return wrap_kw
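# (Added commentary) With ``named=True``, a listener receives every event
# argument by keyword; ``wrap_kw`` above rebuilds the keyword dictionary
# from the positional arguments the dispatcher emits, e.g. (``some_engine``
# is assumed to exist)::
#
#     @event.listens_for(some_engine, "before_cursor_execute", named=True)
#     def go(**kw):
#         statement = kw["statement"]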
|
||||
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
target = event_key.dispatch_target
|
||||
assert isinstance(target, type), \
|
||||
"Class-level Event targets must be classes."
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls is not target and cls not in self._clslevel:
|
||||
self.update_subclass(cls)
|
||||
else:
|
||||
if cls not in self._clslevel:
|
||||
self._clslevel[cls] = []
|
||||
self._clslevel[cls].insert(0, event_key._listen_fn)
|
||||
registry._stored_in_collection(event_key, self)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
target = event_key.dispatch_target
|
||||
assert isinstance(target, type), \
|
||||
"Class-level Event targets must be classes."
|
||||
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls is not target and cls not in self._clslevel:
|
||||
self.update_subclass(cls)
|
||||
else:
|
||||
if cls not in self._clslevel:
|
||||
self._clslevel[cls] = []
|
||||
self._clslevel[cls].append(event_key._listen_fn)
|
||||
registry._stored_in_collection(event_key, self)
|
||||
|
||||
def update_subclass(self, target):
|
||||
if target not in self._clslevel:
|
||||
self._clslevel[target] = []
|
||||
clslevel = self._clslevel[target]
|
||||
for cls in target.__mro__[1:]:
|
||||
if cls in self._clslevel:
|
||||
clslevel.extend([
|
||||
fn for fn
|
||||
in self._clslevel[cls]
|
||||
if fn not in clslevel
|
||||
])
|
||||
|
||||
def remove(self, event_key):
|
||||
target = event_key.dispatch_target
|
||||
stack = [target]
|
||||
while stack:
|
||||
cls = stack.pop(0)
|
||||
stack.extend(cls.__subclasses__())
|
||||
if cls in self._clslevel:
|
||||
self._clslevel[cls].remove(event_key._listen_fn)
|
||||
registry._removed_from_collection(event_key, self)
|
||||
|
||||
def clear(self):
|
||||
"""Clear all class level listeners"""
|
||||
|
||||
to_clear = set()
|
||||
for dispatcher in self._clslevel.values():
|
||||
to_clear.update(dispatcher)
|
||||
dispatcher[:] = []
|
||||
registry._clear(self, to_clear)
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _DispatchDescriptor at the class level of
|
||||
a dispatcher, this returns self.
|
||||
|
||||
"""
|
||||
return self
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self
|
||||
elif obj._parent_cls in self._empty_listeners:
|
||||
ret = self._empty_listeners[obj._parent_cls]
|
||||
else:
|
||||
self._empty_listeners[obj._parent_cls] = ret = \
|
||||
_EmptyListener(self, obj._parent_cls)
|
||||
# assigning it to __dict__ means
|
||||
# memoized for fast re-access. but more memory.
|
||||
obj.__dict__[self.__name__] = ret
|
||||
return ret
|
||||
|
||||
class _HasParentDispatchDescriptor(object):
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
return self.parent._adjust_fn_spec(fn, named)
|
||||
|
||||
class _EmptyListener(_HasParentDispatchDescriptor):
|
||||
"""Serves as a class-level interface to the events
|
||||
served by a _DispatchDescriptor, when there are no
|
||||
instance-level events present.
|
||||
|
||||
Is replaced by _ListenerCollection when instance-level
|
||||
events are added.
|
||||
|
||||
"""
|
||||
def __init__(self, parent, target_cls):
|
||||
if target_cls not in parent._clslevel:
|
||||
parent.update_subclass(target_cls)
|
||||
self.parent = parent # _DispatchDescriptor
|
||||
self.parent_listeners = parent._clslevel[target_cls]
|
||||
self.name = parent.__name__
|
||||
self.propagate = frozenset()
|
||||
self.listeners = ()
|
||||
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _EmptyListener at the instance level of
|
||||
a dispatcher, this generates a new
|
||||
_ListenerCollection, applies it to the instance,
|
||||
and returns it.
|
||||
|
||||
"""
|
||||
result = _ListenerCollection(self.parent, obj._parent_cls)
|
||||
if obj.__dict__[self.name] is self:
|
||||
obj.__dict__[self.name] = result
|
||||
return result
|
||||
|
||||
def _needs_modify(self, *args, **kw):
|
||||
raise NotImplementedError("need to call for_modify()")
|
||||
|
||||
exec_once = insert = append = remove = clear = _needs_modify
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Execute this event."""
|
||||
|
||||
for fn in self.parent_listeners:
|
||||
fn(*args, **kw)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.parent_listeners)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.parent_listeners)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.parent_listeners)
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
|
||||
class _CompoundListener(_HasParentDispatchDescriptor):
|
||||
_exec_once = False
|
||||
|
||||
@util.memoized_property
|
||||
def _exec_once_mutex(self):
|
||||
return threading.Lock()
|
||||
|
||||
def exec_once(self, *args, **kw):
|
||||
"""Execute this event, but only if it has not been
|
||||
executed already for this collection."""
|
||||
|
||||
if not self._exec_once:
|
||||
with self._exec_once_mutex:
|
||||
if not self._exec_once:
|
||||
try:
|
||||
self(*args, **kw)
|
||||
finally:
|
||||
self._exec_once = True
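# (Added commentary) Classic double-checked locking: the flag is read once
# without the mutex for speed, then re-checked under the mutex so that
# concurrent callers cannot both run the listeners; the flag is set in the
# ``finally`` block even if a listener raises.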
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
"""Execute this event."""
|
||||
|
||||
for fn in self.parent_listeners:
|
||||
fn(*args, **kw)
|
||||
for fn in self.listeners:
|
||||
fn(*args, **kw)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.parent_listeners) + len(self.listeners)
|
||||
|
||||
def __iter__(self):
|
||||
return chain(self.parent_listeners, self.listeners)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.listeners or self.parent_listeners)
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
class _ListenerCollection(RefCollection, _CompoundListener):
|
||||
"""Instance-level attributes on instances of :class:`._Dispatch`.
|
||||
|
||||
Represents a collection of listeners.
|
||||
|
||||
As of 0.7.9, _ListenerCollection is only first
|
||||
created via the _EmptyListener.for_modify() method.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, parent, target_cls):
|
||||
if target_cls not in parent._clslevel:
|
||||
parent.update_subclass(target_cls)
|
||||
self.parent_listeners = parent._clslevel[target_cls]
|
||||
self.parent = parent
|
||||
self.name = parent.__name__
|
||||
self.listeners = []
|
||||
self.propagate = set()
|
||||
|
||||
def for_modify(self, obj):
|
||||
"""Return an event collection which can be modified.
|
||||
|
||||
For _ListenerCollection at the instance level of
|
||||
a dispatcher, this returns self.
|
||||
|
||||
"""
|
||||
return self
|
||||
|
||||
def _update(self, other, only_propagate=True):
|
||||
"""Populate from the listeners in another :class:`_Dispatch`
|
||||
object."""
|
||||
|
||||
existing_listeners = self.listeners
|
||||
existing_listener_set = set(existing_listeners)
|
||||
self.propagate.update(other.propagate)
|
||||
other_listeners = [l for l
|
||||
in other.listeners
|
||||
if l not in existing_listener_set
|
||||
and not only_propagate or l in self.propagate
|
||||
]
|
||||
|
||||
existing_listeners.extend(other_listeners)
|
||||
|
||||
to_associate = other.propagate.union(other_listeners)
|
||||
registry._stored_in_collection_multi(self, other, to_associate)
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
if event_key._listen_fn not in self.listeners:
|
||||
event_key.prepend_to_list(self, self.listeners)
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
if event_key._listen_fn not in self.listeners:
|
||||
event_key.append_to_list(self, self.listeners)
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
def remove(self, event_key):
|
||||
self.listeners.remove(event_key._listen_fn)
|
||||
self.propagate.discard(event_key._listen_fn)
|
||||
registry._removed_from_collection(event_key, self)
|
||||
|
||||
def clear(self):
|
||||
registry._clear(self, self.listeners)
|
||||
self.propagate.clear()
|
||||
self.listeners[:] = []
|
||||
|
||||
|
||||
class _JoinedDispatchDescriptor(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self
|
||||
else:
|
||||
obj.__dict__[self.name] = ret = _JoinedListener(
|
||||
obj.parent, self.name,
|
||||
getattr(obj.local, self.name)
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
class _JoinedListener(_CompoundListener):
|
||||
_exec_once = False
|
||||
|
||||
def __init__(self, parent, name, local):
|
||||
self.parent = parent
|
||||
self.name = name
|
||||
self.local = local
|
||||
self.parent_listeners = self.local
|
||||
|
||||
@property
|
||||
def listeners(self):
|
||||
return getattr(self.parent, self.name)
|
||||
|
||||
def _adjust_fn_spec(self, fn, named):
|
||||
return self.local._adjust_fn_spec(fn, named)
|
||||
|
||||
def for_modify(self, obj):
|
||||
self.local = self.parent_listeners = self.local.for_modify(obj)
|
||||
return self
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
self.local.insert(event_key, propagate)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
self.local.append(event_key, propagate)
|
||||
|
||||
def remove(self, event_key):
|
||||
self.local.remove(event_key)
|
||||
|
||||
def clear(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
@@ -1,217 +0,0 @@
# event/base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Base implementation classes.

The public-facing ``Events`` serves as the base class for an event interface;
its public attributes represent different kinds of events.  These attributes
are mirrored onto a ``_Dispatch`` class, which serves as a container for
collections of listener functions.  These collections are represented both
at the class level of a particular ``_Dispatch`` class as well as within
instances of ``_Dispatch``.

"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util
|
||||
from .attr import _JoinedDispatchDescriptor, _EmptyListener, _DispatchDescriptor
|
||||
|
||||
_registrars = util.defaultdict(list)
|
||||
|
||||
|
||||
def _is_event_name(name):
|
||||
return not name.startswith('_') and name != 'dispatch'
|
||||
|
||||
|
||||
class _UnpickleDispatch(object):
|
||||
"""Serializable callable that re-generates an instance of
|
||||
:class:`_Dispatch` given a particular :class:`.Events` subclass.
|
||||
|
||||
"""
|
||||
def __call__(self, _parent_cls):
|
||||
for cls in _parent_cls.__mro__:
|
||||
if 'dispatch' in cls.__dict__:
|
||||
return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
|
||||
else:
|
||||
raise AttributeError("No class with a 'dispatch' member present.")
|
||||
|
||||
|
||||
class _Dispatch(object):
|
||||
"""Mirror the event listening definitions of an Events class with
|
||||
listener collections.
|
||||
|
||||
Classes which define a "dispatch" member will return a
|
||||
non-instantiated :class:`._Dispatch` subclass when the member
|
||||
is accessed at the class level. When the "dispatch" member is
|
||||
accessed at the instance level of its owner, an instance
|
||||
of the :class:`._Dispatch` class is returned.
|
||||
|
||||
A :class:`._Dispatch` class is generated for each :class:`.Events`
|
||||
class defined, by the :func:`._create_dispatcher_class` function.
|
||||
The original :class:`.Events` classes remain untouched.
|
||||
This decouples the construction of :class:`.Events` subclasses from
|
||||
the implementation used by the event internals, and allows
|
||||
inspecting tools like Sphinx to work in an unsurprising
|
||||
way against the public API.
|
||||
|
||||
"""
|
||||
|
||||
_events = None
|
||||
"""reference the :class:`.Events` class which this
|
||||
:class:`._Dispatch` is created for."""
|
||||
|
||||
def __init__(self, _parent_cls):
|
||||
self._parent_cls = _parent_cls
|
||||
|
||||
@util.classproperty
|
||||
def _listen(cls):
|
||||
return cls._events._listen
|
||||
|
||||
def _join(self, other):
|
||||
"""Create a 'join' of this :class:`._Dispatch` and another.
|
||||
|
||||
This new dispatcher will dispatch events to both
|
||||
:class:`._Dispatch` objects.
|
||||
|
||||
"""
|
||||
if '_joined_dispatch_cls' not in self.__class__.__dict__:
|
||||
cls = type(
|
||||
"Joined%s" % self.__class__.__name__,
|
||||
(_JoinedDispatcher, self.__class__), {}
|
||||
)
|
||||
for ls in _event_descriptors(self):
|
||||
setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
|
||||
|
||||
self.__class__._joined_dispatch_cls = cls
|
||||
return self._joined_dispatch_cls(self, other)
|
||||
|
||||
def __reduce__(self):
|
||||
return _UnpickleDispatch(), (self._parent_cls, )
|
||||
|
||||
def _update(self, other, only_propagate=True):
|
||||
"""Populate from the listeners in another :class:`_Dispatch`
|
||||
object."""
|
||||
|
||||
for ls in _event_descriptors(other):
|
||||
if isinstance(ls, _EmptyListener):
|
||||
continue
|
||||
getattr(self, ls.name).\
|
||||
for_modify(self)._update(ls, only_propagate=only_propagate)
|
||||
|
||||
@util.hybridmethod
|
||||
def _clear(self):
|
||||
for attr in dir(self):
|
||||
if _is_event_name(attr):
|
||||
getattr(self, attr).for_modify(self).clear()
|
||||
|
||||
|
||||
def _event_descriptors(target):
|
||||
return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
|
||||
|
||||
|
||||
class _EventMeta(type):
|
||||
"""Intercept new Event subclasses and create
|
||||
associated _Dispatch classes."""
|
||||
|
||||
def __init__(cls, classname, bases, dict_):
|
||||
_create_dispatcher_class(cls, classname, bases, dict_)
|
||||
return type.__init__(cls, classname, bases, dict_)
|
||||
|
||||
|
||||
def _create_dispatcher_class(cls, classname, bases, dict_):
|
||||
"""Create a :class:`._Dispatch` class corresponding to an
|
||||
:class:`.Events` class."""
|
||||
|
||||
# there's all kinds of ways to do this,
|
||||
# i.e. make a Dispatch class that shares the '_listen' method
|
||||
# of the Event class, this is the straight monkeypatch.
|
||||
dispatch_base = getattr(cls, 'dispatch', _Dispatch)
|
||||
dispatch_cls = type("%sDispatch" % classname,
|
||||
(dispatch_base, ), {})
|
||||
cls._set_dispatch(cls, dispatch_cls)
|
||||
|
||||
for k in dict_:
|
||||
if _is_event_name(k):
|
||||
setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
|
||||
_registrars[k].append(cls)
|
||||
|
||||
if getattr(cls, '_dispatch_target', None):
|
||||
cls._dispatch_target.dispatch = dispatcher(cls)
|
||||
|
||||
|
||||
def _remove_dispatcher(cls):
|
||||
for k in dir(cls):
|
||||
if _is_event_name(k):
|
||||
_registrars[k].remove(cls)
|
||||
if not _registrars[k]:
|
||||
del _registrars[k]
|
||||
|
||||
class Events(util.with_metaclass(_EventMeta, object)):
|
||||
"""Define event listening functions for a particular target type."""
|
||||
|
||||
@staticmethod
|
||||
def _set_dispatch(cls, dispatch_cls):
|
||||
# this allows an Events subclass to define additional utility
|
||||
# methods made available to the target via
|
||||
# "self.dispatch._events.<utilitymethod>"
|
||||
# @staticmethod to allow easy "super" calls while in a metaclass
|
||||
# constructor.
|
||||
cls.dispatch = dispatch_cls
|
||||
dispatch_cls._events = cls
|
||||
|
||||
|
||||
@classmethod
|
||||
def _accept_with(cls, target):
|
||||
# Mapper, ClassManager, Session override this to
|
||||
# also accept classes, scoped_sessions, sessionmakers, etc.
|
||||
if hasattr(target, 'dispatch') and (
|
||||
isinstance(target.dispatch, cls.dispatch) or \
|
||||
isinstance(target.dispatch, type) and \
|
||||
issubclass(target.dispatch, cls.dispatch)
|
||||
):
|
||||
return target
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _listen(cls, event_key, propagate=False, insert=False, named=False):
|
||||
event_key.base_listen(propagate=propagate, insert=insert, named=named)
|
||||
|
||||
@classmethod
|
||||
def _remove(cls, event_key):
|
||||
event_key.remove()
|
||||
|
||||
@classmethod
|
||||
def _clear(cls):
|
||||
cls.dispatch._clear()
|
||||
|
||||
|
||||
class _JoinedDispatcher(object):
|
||||
"""Represent a connection between two _Dispatch objects."""
|
||||
|
||||
def __init__(self, local, parent):
|
||||
self.local = local
|
||||
self.parent = parent
|
||||
self._parent_cls = local._parent_cls
|
||||
|
||||
|
||||
class dispatcher(object):
|
||||
"""Descriptor used by target classes to
|
||||
deliver the _Dispatch class at the class level
|
||||
and produce new _Dispatch instances for target
|
||||
instances.
|
||||
|
||||
"""
|
||||
def __init__(self, events):
|
||||
self.dispatch_cls = events.dispatch
|
||||
self.events = events
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self.dispatch_cls
|
||||
obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
|
||||
return disp
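# (Added commentary) Assigning into obj.__dict__ means this non-data
# descriptor is consulted only on the first access per instance; later
# reads find the plain instance attribute directly.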
|
||||
|
@@ -1,156 +0,0 @@
# event/legacy.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle adaptation of legacy call signatures,
generation of deprecation notes and docstrings.

"""
|
||||
|
||||
from .. import util
|
||||
|
||||
def _legacy_signature(since, argnames, converter=None):
|
||||
def leg(fn):
|
||||
if not hasattr(fn, '_legacy_signatures'):
|
||||
fn._legacy_signatures = []
|
||||
fn._legacy_signatures.append((since, argnames, converter))
|
||||
return fn
|
||||
return leg
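# Hypothetical usage sketch (added for illustration; the event name,
# version and argument list are made up)::
#
#     @_legacy_signature("0.9", ["conn", "statement"])
#     def some_event(self, conn, statement, parameters):
#         ...
#
# Listeners written against the old ``(conn, statement)`` form are then
# matched by argument count in _wrap_fn_for_legacy below and adapted to
# the current signature.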
|
||||
|
||||
def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
|
||||
for since, argnames, conv in dispatch_descriptor.legacy_signatures:
|
||||
if argnames[-1] == "**kw":
|
||||
has_kw = True
|
||||
argnames = argnames[0:-1]
|
||||
else:
|
||||
has_kw = False
|
||||
|
||||
if len(argnames) == len(argspec.args) \
|
||||
and has_kw is bool(argspec.keywords):
|
||||
|
||||
if conv:
|
||||
assert not has_kw
|
||||
def wrap_leg(*args):
|
||||
return fn(*conv(*args))
|
||||
else:
|
||||
def wrap_leg(*args, **kw):
|
||||
argdict = dict(zip(dispatch_descriptor.arg_names, args))
|
||||
args = [argdict[name] for name in argnames]
|
||||
if has_kw:
|
||||
return fn(*args, **kw)
|
||||
else:
|
||||
return fn(*args)
|
||||
return wrap_leg
|
||||
else:
|
||||
return fn
|
||||
|
||||
def _indent(text, indent):
|
||||
return "\n".join(
|
||||
indent + line
|
||||
for line in text.split("\n")
|
||||
)
|
||||
|
||||
def _standard_listen_example(dispatch_descriptor, sample_target, fn):
|
||||
example_kw_arg = _indent(
|
||||
"\n".join(
|
||||
"%(arg)s = kw['%(arg)s']" % {"arg": arg}
|
||||
for arg in dispatch_descriptor.arg_names[0:2]
|
||||
),
|
||||
" ")
|
||||
if dispatch_descriptor.legacy_signatures:
|
||||
current_since = max(since for since, args, conv
|
||||
in dispatch_descriptor.legacy_signatures)
|
||||
else:
|
||||
current_since = None
|
||||
text = (
|
||||
"from sqlalchemy import event\n\n"
|
||||
"# standard decorator style%(current_since)s\n"
|
||||
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
|
||||
"def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"\n # ... (event handling logic) ...\n"
|
||||
)
|
||||
|
||||
if len(dispatch_descriptor.arg_names) > 3:
|
||||
text += (
|
||||
|
||||
"\n# named argument style (new in 0.9)\n"
|
||||
"@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
|
||||
"def receive_%(event_name)s(**kw):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"%(example_kw_arg)s\n"
|
||||
"\n # ... (event handling logic) ...\n"
|
||||
)
|
||||
|
||||
text %= {
|
||||
"current_since": " (arguments as of %s)" %
|
||||
current_since if current_since else "",
|
||||
"event_name": fn.__name__,
|
||||
"has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
|
||||
"named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
|
||||
"example_kw_arg": example_kw_arg,
|
||||
"sample_target": sample_target
|
||||
}
|
||||
return text
|
||||
|
||||
def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
|
||||
text = ""
|
||||
for since, args, conv in dispatch_descriptor.legacy_signatures:
|
||||
text += (
|
||||
"\n# legacy calling style (pre-%(since)s)\n"
|
||||
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
|
||||
"def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
|
||||
" \"listen for the '%(event_name)s' event\"\n"
|
||||
"\n # ... (event handling logic) ...\n" % {
|
||||
"since": since,
|
||||
"event_name": fn.__name__,
|
||||
"has_kw_arguments": " **kw" if dispatch_descriptor.has_kw else "",
|
||||
"named_event_arguments": ", ".join(args),
|
||||
"sample_target": sample_target
|
||||
}
|
||||
)
|
||||
return text
|
||||
|
||||
def _version_signature_changes(dispatch_descriptor):
|
||||
since, args, conv = dispatch_descriptor.legacy_signatures[0]
|
||||
return (
|
||||
"\n.. versionchanged:: %(since)s\n"
|
||||
" The ``%(event_name)s`` event now accepts the \n"
|
||||
" arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
|
||||
" Listener functions which accept the previous argument \n"
|
||||
" signature(s) listed above will be automatically \n"
|
||||
" adapted to the new signature." % {
|
||||
"since": since,
|
||||
"event_name": dispatch_descriptor.__name__,
|
||||
"named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
|
||||
"has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
|
||||
}
|
||||
)
|
||||
|
||||
def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
|
||||
header = ".. container:: event_signatures\n\n"\
|
||||
" Example argument forms::\n"\
|
||||
"\n"
|
||||
|
||||
sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
|
||||
text = (
|
||||
header +
|
||||
_indent(
|
||||
_standard_listen_example(
|
||||
dispatch_descriptor, sample_target, fn),
|
||||
" " * 8)
|
||||
)
|
||||
if dispatch_descriptor.legacy_signatures:
|
||||
text += _indent(
|
||||
_legacy_listen_examples(
|
||||
dispatch_descriptor, sample_target, fn),
|
||||
" " * 8)
|
||||
|
||||
text += _version_signature_changes(dispatch_descriptor)
|
||||
|
||||
return util.inject_docstring_text(fn.__doc__,
|
||||
text,
|
||||
1
|
||||
)
|
@@ -1,241 +0,0 @@
# event/registry.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides managed registration services on behalf of :func:`.listen`
arguments.

By "managed registration", we mean that event listening functions and
other objects can be added to various collections in such a way that their
membership in all those collections can be revoked at once, based on
an equivalent :class:`._EventKey`.

"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import weakref
|
||||
import collections
|
||||
import types
|
||||
from .. import exc, util

_key_to_collection = collections.defaultdict(dict)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained

(target, identifier, fn) -> {
    ref(listenercollection) -> ref(listener_fn)
    ref(listenercollection) -> ref(listener_fn)
    ref(listenercollection) -> ref(listener_fn)
}
"""

_collection_to_key = collections.defaultdict(dict)
"""
Given a _ListenerCollection or _DispatchDescriptor, can locate
all the original listen() arguments and the listener fn contained

ref(listenercollection) -> {
    ref(listener_fn) -> (target, identifier, fn),
    ref(listener_fn) -> (target, identifier, fn),
    ref(listener_fn) -> (target, identifier, fn),
}
"""
|
||||
|
||||
def _collection_gced(ref):
|
||||
# defaultdict, so can't get a KeyError
|
||||
if not _collection_to_key or ref not in _collection_to_key:
|
||||
return
|
||||
listener_to_key = _collection_to_key.pop(ref)
|
||||
for key in listener_to_key.values():
|
||||
if key in _key_to_collection:
|
||||
# defaultdict, so can't get a KeyError
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
dispatch_reg.pop(ref)
|
||||
if not dispatch_reg:
|
||||
_key_to_collection.pop(key)
|
||||
|
||||
def _stored_in_collection(event_key, owner):
|
||||
key = event_key._key
|
||||
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
|
||||
owner_ref = owner.ref
|
||||
listen_ref = weakref.ref(event_key._listen_fn)
|
||||
|
||||
if owner_ref in dispatch_reg:
|
||||
assert dispatch_reg[owner_ref] == listen_ref
|
||||
else:
|
||||
dispatch_reg[owner_ref] = listen_ref
|
||||
|
||||
listener_to_key = _collection_to_key[owner_ref]
|
||||
listener_to_key[listen_ref] = key
|
||||
|
||||
def _removed_from_collection(event_key, owner):
|
||||
key = event_key._key
|
||||
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
|
||||
listen_ref = weakref.ref(event_key._listen_fn)
|
||||
|
||||
owner_ref = owner.ref
|
||||
dispatch_reg.pop(owner_ref, None)
|
||||
if not dispatch_reg:
|
||||
del _key_to_collection[key]
|
||||
|
||||
if owner_ref in _collection_to_key:
|
||||
listener_to_key = _collection_to_key[owner_ref]
|
||||
listener_to_key.pop(listen_ref)
|
||||
|
||||
def _stored_in_collection_multi(newowner, oldowner, elements):
|
||||
if not elements:
|
||||
return
|
||||
|
||||
oldowner = oldowner.ref
|
||||
newowner = newowner.ref
|
||||
|
||||
old_listener_to_key = _collection_to_key[oldowner]
|
||||
new_listener_to_key = _collection_to_key[newowner]
|
||||
|
||||
for listen_fn in elements:
|
||||
listen_ref = weakref.ref(listen_fn)
|
||||
key = old_listener_to_key[listen_ref]
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
if newowner in dispatch_reg:
|
||||
assert dispatch_reg[newowner] == listen_ref
|
||||
else:
|
||||
dispatch_reg[newowner] = listen_ref
|
||||
|
||||
new_listener_to_key[listen_ref] = key
|
||||
|
||||
def _clear(owner, elements):
|
||||
if not elements:
|
||||
return
|
||||
|
||||
owner = owner.ref
|
||||
listener_to_key = _collection_to_key[owner]
|
||||
for listen_fn in elements:
|
||||
listen_ref = weakref.ref(listen_fn)
|
||||
key = listener_to_key[listen_ref]
|
||||
dispatch_reg = _key_to_collection[key]
|
||||
dispatch_reg.pop(owner, None)
|
||||
|
||||
if not dispatch_reg:
|
||||
del _key_to_collection[key]
|
||||
|
||||
|
||||
class _EventKey(object):
|
||||
"""Represent :func:`.listen` arguments.
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None):
|
||||
self.target = target
|
||||
self.identifier = identifier
|
||||
self.fn = fn
|
||||
if isinstance(fn, types.MethodType):
|
||||
self.fn_key = id(fn.__func__), id(fn.__self__)
|
||||
else:
|
||||
self.fn_key = id(fn)
|
||||
self.fn_wrap = _fn_wrap
|
||||
self.dispatch_target = dispatch_target
|
||||
|
||||
@property
|
||||
def _key(self):
|
||||
return (id(self.target), self.identifier, self.fn_key)
|
||||
|
||||
def with_wrapper(self, fn_wrap):
|
||||
if fn_wrap is self._listen_fn:
|
||||
return self
|
||||
else:
|
||||
return _EventKey(
|
||||
self.target,
|
||||
self.identifier,
|
||||
self.fn,
|
||||
self.dispatch_target,
|
||||
_fn_wrap=fn_wrap
|
||||
)
|
||||
|
||||
def with_dispatch_target(self, dispatch_target):
|
||||
if dispatch_target is self.dispatch_target:
|
||||
return self
|
||||
else:
|
||||
return _EventKey(
|
||||
self.target,
|
||||
self.identifier,
|
||||
self.fn,
|
||||
dispatch_target,
|
||||
_fn_wrap=self.fn_wrap
|
||||
)
|
||||
|
||||
def listen(self, *args, **kw):
|
||||
once = kw.pop("once", False)
|
||||
if once:
|
||||
self.with_wrapper(util.only_once(self._listen_fn)).listen(*args, **kw)
|
||||
else:
|
||||
self.dispatch_target.dispatch._listen(self, *args, **kw)
|
||||
|
||||
def remove(self):
|
||||
key = self._key
|
||||
|
||||
if key not in _key_to_collection:
|
||||
raise exc.InvalidRequestError(
|
||||
"No listeners found for event %s / %r / %s " %
|
||||
(self.target, self.identifier, self.fn)
|
||||
)
|
||||
dispatch_reg = _key_to_collection.pop(key)
|
||||
|
||||
for collection_ref, listener_ref in dispatch_reg.items():
|
||||
collection = collection_ref()
|
||||
listener_fn = listener_ref()
|
||||
if collection is not None and listener_fn is not None:
|
||||
collection.remove(self.with_wrapper(listener_fn))
|
||||
|
||||
def contains(self):
|
||||
"""Return True if this event key is registered to listen.
|
||||
"""
|
||||
return self._key in _key_to_collection
|
||||
|
||||
def base_listen(self, propagate=False, insert=False,
|
||||
named=False):
|
||||
|
||||
target, identifier, fn = \
|
||||
self.dispatch_target, self.identifier, self._listen_fn
|
||||
|
||||
dispatch_descriptor = getattr(target.dispatch, identifier)
|
||||
|
||||
fn = dispatch_descriptor._adjust_fn_spec(fn, named)
|
||||
self = self.with_wrapper(fn)
|
||||
|
||||
if insert:
|
||||
dispatch_descriptor.\
|
||||
for_modify(target.dispatch).insert(self, propagate)
|
||||
else:
|
||||
dispatch_descriptor.\
|
||||
for_modify(target.dispatch).append(self, propagate)
|
||||
|
||||
@property
|
||||
def _listen_fn(self):
|
||||
return self.fn_wrap or self.fn
|
||||
|
||||
def append_value_to_list(self, owner, list_, value):
|
||||
_stored_in_collection(self, owner)
|
||||
list_.append(value)
|
||||
|
||||
def append_to_list(self, owner, list_):
|
||||
_stored_in_collection(self, owner)
|
||||
list_.append(self._listen_fn)
|
||||
|
||||
def remove_from_list(self, owner, list_):
|
||||
_removed_from_collection(self, owner)
|
||||
list_.remove(self._listen_fn)
|
||||
|
||||
def prepend_to_list(self, owner, list_):
|
||||
_stored_in_collection(self, owner)
|
||||
list_.insert(0, self._listen_fn)
|
||||
|
||||
|
@@ -6,15 +6,15 @@
|
||||
|
||||
"""Core event interfaces."""
|
||||
|
||||
from . import event, exc
|
||||
from .pool import Pool
|
||||
from .engine import Connectable, Engine, Dialect
|
||||
from .sql.base import SchemaEventTarget
|
||||
from . import event, exc, util
|
||||
engine = util.importlater('sqlalchemy', 'engine')
|
||||
pool = util.importlater('sqlalchemy', 'pool')
|
||||
|
||||
|
||||
class DDLEvents(event.Events):
|
||||
"""
|
||||
Define event listeners for schema objects,
|
||||
that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
|
||||
that is, :class:`.SchemaItem` and :class:`.SchemaEvent`
|
||||
subclasses, including :class:`.MetaData`, :class:`.Table`,
|
||||
:class:`.Column`.
|
||||
|
||||
@@ -70,11 +70,8 @@ class DDLEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeSchemaClassOrObject"
|
||||
_dispatch_target = SchemaEventTarget
|
||||
|
||||
def before_create(self, target, connection, **kw):
|
||||
"""Called before CREATE statements are emitted.
|
||||
"""Called before CREATE statments are emitted.
|
||||
|
||||
:param target: the :class:`.MetaData` or :class:`.Table`
|
||||
object which is the target of the event.
|
||||
@@ -90,7 +87,7 @@ class DDLEvents(event.Events):
|
||||
"""
|
||||
|
||||
def after_create(self, target, connection, **kw):
|
||||
"""Called after CREATE statements are emitted.
|
||||
"""Called after CREATE statments are emitted.
|
||||
|
||||
:param target: the :class:`.MetaData` or :class:`.Table`
|
||||
object which is the target of the event.
|
||||
@@ -106,7 +103,7 @@ class DDLEvents(event.Events):
|
||||
"""
|
||||
|
||||
def before_drop(self, target, connection, **kw):
|
||||
"""Called before DROP statements are emitted.
|
||||
"""Called before DROP statments are emitted.
|
||||
|
||||
:param target: the :class:`.MetaData` or :class:`.Table`
|
||||
object which is the target of the event.
|
||||
@@ -122,7 +119,7 @@ class DDLEvents(event.Events):
|
||||
"""
|
||||
|
||||
def after_drop(self, target, connection, **kw):
|
||||
"""Called after DROP statements are emitted.
|
||||
"""Called after DROP statments are emitted.
|
||||
|
||||
:param target: the :class:`.MetaData` or :class:`.Table`
|
||||
object which is the target of the event.
|
||||
@@ -220,6 +217,25 @@ class DDLEvents(event.Events):
|
||||
"""
|
||||
|
||||
|
||||
class SchemaEventTarget(object):
|
||||
"""Base class for elements that are the targets of :class:`.DDLEvents`
|
||||
events.
|
||||
|
||||
This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
|
||||
|
||||
"""
|
||||
dispatch = event.dispatcher(DDLEvents)
|
||||
|
||||
def _set_parent(self, parent):
|
||||
"""Associate with this SchemaEvent's parent object."""
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
def _set_parent_with_dispatch(self, parent):
|
||||
self.dispatch.before_parent_attach(self, parent)
|
||||
self._set_parent(parent)
|
||||
self.dispatch.after_parent_attach(self, parent)
|
||||
|
||||
|
||||
class PoolEvents(event.Events):
|
||||
"""Available events for :class:`.Pool`.
|
||||
@@ -250,77 +266,59 @@ class PoolEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeEngineOrPool"
|
||||
_dispatch_target = Pool
|
||||
|
||||
@classmethod
|
||||
def _accept_with(cls, target):
|
||||
if isinstance(target, type):
|
||||
if issubclass(target, Engine):
|
||||
return Pool
|
||||
elif issubclass(target, Pool):
|
||||
if issubclass(target, engine.Engine):
|
||||
return pool.Pool
|
||||
elif issubclass(target, pool.Pool):
|
||||
return target
|
||||
elif isinstance(target, Engine):
|
||||
elif isinstance(target, engine.Engine):
|
||||
return target.pool
|
||||
else:
|
||||
return target
|
||||
|
||||
def connect(self, dbapi_connection, connection_record):
|
||||
"""Called at the moment a particular DBAPI connection is first
|
||||
created for a given :class:`.Pool`.
|
||||
"""Called once for each new DB-API connection or Pool's ``creator()``.
|
||||
|
||||
This event allows one to capture the point directly after which
|
||||
the DBAPI module-level ``.connect()`` method has been used in order
|
||||
to produce a new DBAPI connection.
|
||||
:param dbapi_con:
|
||||
A newly connected raw DB-API connection (not a SQLAlchemy
|
||||
``Connection`` wrapper).
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
:param con_record:
|
||||
The ``_ConnectionRecord`` that persistently manages the connection
|
||||
|
||||
"""
|
||||
|
||||
def first_connect(self, dbapi_connection, connection_record):
|
||||
"""Called exactly once for the first time a DBAPI connection is
|
||||
checked out from a particular :class:`.Pool`.
|
||||
"""Called exactly once for the first DB-API connection.
|
||||
|
||||
The rationale for :meth:`.PoolEvents.first_connect` is to determine
|
||||
information about a particular series of database connections based
|
||||
on the settings used for all connections. Since a particular
|
||||
:class:`.Pool` refers to a single "creator" function (which in terms
|
||||
of a :class:`.Engine` refers to the URL and connection options used),
|
||||
it is typically valid to make observations about a single connection
|
||||
that can be safely assumed to be valid about all subsequent connections,
|
||||
such as the database version, the server and client encoding settings,
|
||||
collation settings, and many others.
|
||||
:param dbapi_con:
|
||||
A newly connected raw DB-API connection (not a SQLAlchemy
|
||||
``Connection`` wrapper).
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
:param con_record:
|
||||
The ``_ConnectionRecord`` that persistently manages the connection
|
||||
|
||||
"""
|
||||
|
||||
def checkout(self, dbapi_connection, connection_record, connection_proxy):
|
||||
"""Called when a connection is retrieved from the Pool.
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
:param dbapi_con:
|
||||
A raw DB-API connection
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
:param con_record:
|
||||
The ``_ConnectionRecord`` that persistently manages the connection
|
||||
|
||||
:param connection_proxy: the :class:`._ConnectionFairy` object which
|
||||
will proxy the public interface of the DBAPI connection for the lifespan
|
||||
of the checkout.
|
||||
:param con_proxy:
|
||||
The ``_ConnectionFairy`` which manages the connection for the span of
|
||||
the current checkout.
|
||||
|
||||
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
|
||||
connection will be disposed and a fresh connection retrieved.
|
||||
Processing of all checkout listeners will abort and restart
|
||||
using the new connection.
|
||||
|
||||
.. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event
|
||||
which occurs upon creation of a new :class:`.Connection`.
|
||||
|
||||
"""
|
||||
|
||||
def checkin(self, dbapi_connection, connection_record):
|
||||
@@ -330,14 +328,15 @@ class PoolEvents(event.Events):
|
||||
connection has been invalidated. ``checkin`` will not be called
|
||||
for detached connections. (They do not return to the pool.)
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
:param dbapi_con:
|
||||
A raw DB-API connection
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
:param con_record:
|
||||
The ``_ConnectionRecord`` that persistently manages the connection
|
||||
|
||||
"""
|
||||
|
||||
def reset(self, dbapi_connection, connection_record):
|
||||
def reset(self, dbapi_con, con_record):
|
||||
"""Called before the "reset" action occurs for a pooled connection.
|
||||
|
||||
This event represents
|
||||
@@ -348,13 +347,14 @@ class PoolEvents(event.Events):
|
||||
|
||||
|
||||
The :meth:`.PoolEvents.reset` event is usually followed by the
|
||||
:meth:`.PoolEvents.checkin` event is called, except in those
|
||||
the :meth:`.PoolEvents.checkin` event is called, except in those
|
||||
cases where the connection is discarded immediately after reset.
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
:param dbapi_con:
|
||||
A raw DB-API connection
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
:param con_record:
|
||||
The ``_ConnectionRecord`` that persistently manages the connection
|
||||
|
||||
.. versionadded:: 0.8
|
||||
|
||||
@@ -366,30 +366,6 @@ class PoolEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
def invalidate(self, dbapi_connection, connection_record, exception):
|
||||
"""Called when a DBAPI connection is to be "invalidated".
|
||||
|
||||
This event is called any time the :meth:`._ConnectionRecord.invalidate`
|
||||
method is invoked, either from API usage or via "auto-invalidation".
|
||||
The event occurs before a final attempt to call ``.close()`` on the connection
|
||||
occurs.
|
||||
|
||||
:param dbapi_connection: a DBAPI connection.
|
||||
|
||||
:param connection_record: the :class:`._ConnectionRecord` managing the
|
||||
DBAPI connection.
|
||||
|
||||
:param exception: the exception object corresponding to the reason
|
||||
for this invalidation, if any. May be ``None``.
|
||||
|
||||
.. versionadded:: 0.9.2 Added support for connection invalidation
|
||||
listening.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pool_connection_invalidation`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class ConnectionEvents(event.Events):
|
||||
@@ -463,15 +439,8 @@ class ConnectionEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeEngine"
|
||||
_dispatch_target = Connectable
|
||||
|
||||
|
||||
@classmethod
|
||||
def _listen(cls, event_key, retval=False):
|
||||
target, identifier, fn = \
|
||||
event_key.dispatch_target, event_key.identifier, event_key.fn
|
||||
|
||||
def _listen(cls, target, identifier, fn, retval=False):
|
||||
target._has_events = True
|
||||
|
||||
if not retval:
|
||||
@@ -500,7 +469,7 @@ class ConnectionEvents(event.Events):
|
||||
"'before_cursor_execute' engine "
|
||||
"event listeners accept the 'retval=True' "
|
||||
"argument.")
|
||||
event_key.with_wrapper(fn).base_listen()
|
||||
event.Events._listen(target, identifier, fn)
|
||||
|
||||
def before_execute(self, conn, clauseelement, multiparams, params):
|
||||
"""Intercept high level execute() events, receiving uncompiled
|
||||
@@ -611,7 +580,7 @@ class ConnectionEvents(event.Events):
|
||||
|
||||
This event is called with the DBAPI exception instance
|
||||
received from the DBAPI itself, *before* SQLAlchemy wraps the
|
||||
exception with its own exception wrappers, and before any
|
||||
exception with it's own exception wrappers, and before any
|
||||
other operations are performed on the DBAPI cursor; the
|
||||
existing transaction remains in effect as well as any state
|
||||
on the cursor.
|
||||
@@ -646,103 +615,6 @@ class ConnectionEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
def engine_connect(self, conn, branch):
|
||||
"""Intercept the creation of a new :class:`.Connection`.
|
||||
|
||||
This event is called typically as the direct result of calling
|
||||
the :meth:`.Engine.connect` method.
|
||||
|
||||
It differs from the :meth:`.PoolEvents.connect` method, which
|
||||
refers to the actual connection to a database at the DBAPI level;
|
||||
a DBAPI connection may be pooled and reused for many operations.
|
||||
In contrast, this event refers only to the production of a higher level
|
||||
:class:`.Connection` wrapper around such a DBAPI connection.
|
||||
|
||||
It also differs from the :meth:`.PoolEvents.checkout` event
|
||||
in that it is specific to the :class:`.Connection` object, not the
|
||||
DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although
|
||||
this DBAPI connection is available here via the :attr:`.Connection.connection`
|
||||
attribute. But note there can in fact
|
||||
be multiple :meth:`.PoolEvents.checkout` events within the lifespan
|
||||
of a single :class:`.Connection` object, if that :class:`.Connection`
|
||||
is invalidated and re-established. There can also be multiple
|
||||
:class:`.Connection` objects generated for the same already-checked-out
|
||||
DBAPI connection, in the case that a "branch" of a :class:`.Connection`
|
||||
is produced.
|
||||
|
||||
:param conn: :class:`.Connection` object.
|
||||
:param branch: if True, this is a "branch" of an existing
|
||||
:class:`.Connection`. A branch is generated within the course
|
||||
of a statement execution to invoke supplemental statements, most
|
||||
typically to pre-execute a SELECT of a default value for the purposes
|
||||
of an INSERT statement.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.PoolEvents.checkout` the lower-level pool checkout event
|
||||
for an individual DBAPI connection
|
||||
|
||||
:meth:`.ConnectionEvents.set_connection_execution_options` - a copy of a
|
||||
:class:`.Connection` is also made when the
|
||||
:meth:`.Connection.execution_options` method is called.
|
||||
|
||||
"""
|
||||
|
||||
def set_connection_execution_options(self, conn, opts):
|
||||
"""Intercept when the :meth:`.Connection.execution_options`
|
||||
method is called.
|
||||
|
||||
This method is called after the new :class:`.Connection` has been
|
||||
produced, with the newly updated execution options collection, but
|
||||
before the :class:`.Dialect` has acted upon any of those new options.
|
||||
|
||||
Note that this method is not called when a new :class:`.Connection`
|
||||
is produced which is inheriting execution options from its parent
|
||||
:class:`.Engine`; to intercept this condition, use the
|
||||
:meth:`.ConnectionEvents.engine_connect` event.
|
||||
|
||||
:param conn: The newly copied :class:`.Connection` object
|
||||
|
||||
:param opts: dictionary of options that were passed to the
|
||||
:meth:`.Connection.execution_options` method.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.ConnectionEvents.set_engine_execution_options` - event
|
||||
which is called when :meth:`.Engine.execution_options` is called.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def set_engine_execution_options(self, engine, opts):
|
||||
"""Intercept when the :meth:`.Engine.execution_options`
|
||||
method is called.
|
||||
|
||||
The :meth:`.Engine.execution_options` method produces a shallow
|
||||
copy of the :class:`.Engine` which stores the new options. That new
|
||||
:class:`.Engine` is passed here. A particular application of this
|
||||
method is to add a :meth:`.ConnectionEvents.engine_connect` event
|
||||
handler to the given :class:`.Engine` which will perform some per-
|
||||
:class:`.Connection` task specific to these execution options.
|
||||
|
||||
:param engine: The newly copied :class:`.Engine` object
|
||||
|
||||
:param opts: dictionary of options that were passed to the
|
||||
:meth:`.Connection.execution_options` method.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.ConnectionEvents.set_connection_execution_options` - event
|
||||
which is called when :meth:`.Connection.execution_options` is called.
|
||||
|
||||
"""
|
||||
|
||||
def begin(self, conn):
|
||||
"""Intercept begin() events.
|
||||
|
||||
@@ -780,7 +652,7 @@ class ConnectionEvents(event.Events):
|
||||
:param conn: :class:`.Connection` object
|
||||
"""
|
||||
|
||||
def savepoint(self, conn, name):
|
||||
def savepoint(self, conn, name=None):
|
||||
"""Intercept savepoint() events.
|
||||
|
||||
:param conn: :class:`.Connection` object
|
||||
@@ -840,85 +712,3 @@ class ConnectionEvents(event.Events):
|
||||
:meth:`.TwoPhaseTransaction.prepare` was called.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class DialectEvents(event.Events):
|
||||
"""event interface for execution-replacement functions.
|
||||
|
||||
These events allow direct instrumentation and replacement
|
||||
of key dialect functions which interact with the DBAPI.
|
||||
|
||||
.. note::
|
||||
|
||||
:class:`.DialectEvents` hooks should be considered **semi-public**
|
||||
and experimental.
|
||||
These hooks are not for general use and are only for those situations where
|
||||
intricate re-statement of DBAPI mechanics must be injected onto an existing
|
||||
dialect. For general-use statement-interception events, please
|
||||
use the :class:`.ConnectionEvents` interface.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.ConnectionEvents.before_cursor_execute`
|
||||
|
||||
:meth:`.ConnectionEvents.before_execute`
|
||||
|
||||
:meth:`.ConnectionEvents.after_cursor_execute`
|
||||
|
||||
:meth:`.ConnectionEvents.after_execute`
|
||||
|
||||
|
||||
.. versionadded:: 0.9.4
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeEngine"
|
||||
_dispatch_target = Dialect
|
||||
|
||||
@classmethod
|
||||
def _listen(cls, event_key, retval=False):
|
||||
target, identifier, fn = \
|
||||
event_key.dispatch_target, event_key.identifier, event_key.fn
|
||||
|
||||
target._has_events = True
|
||||
event_key.base_listen()
|
||||
|
||||
@classmethod
|
||||
def _accept_with(cls, target):
|
||||
if isinstance(target, type):
|
||||
if issubclass(target, Engine):
|
||||
return Dialect
|
||||
elif issubclass(target, Dialect):
|
||||
return target
|
||||
elif isinstance(target, Engine):
|
||||
return target.dialect
|
||||
else:
|
||||
return target
|
||||
|
||||
def do_executemany(self, cursor, statement, parameters, context):
|
||||
"""Receive a cursor to have executemany() called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
def do_execute_no_params(self, cursor, statement, context):
|
||||
"""Receive a cursor to have execute() with no parameters called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
def do_execute(self, cursor, statement, parameters, context):
|
||||
"""Receive a cursor to have execute() called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
|
@@ -6,9 +6,9 @@
|
||||
|
||||
"""Exceptions used with SQLAlchemy.
|
||||
|
||||
The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are
|
||||
The base exception class is :class:`.SQLAlchemyError`. Exceptions which are
|
||||
raised as a result of DBAPI exceptions are all subclasses of
|
||||
:exc:`.DBAPIError`.
|
||||
:class:`.DBAPIError`.
|
||||
|
||||
"""
|
||||
|
||||
@@ -26,9 +26,6 @@ class ArgumentError(SQLAlchemyError):
|
||||
|
||||
"""
|
||||
|
||||
class NoSuchModuleError(ArgumentError):
|
||||
"""Raised when a dynamically-loaded module (usually a database dialect)
|
||||
of a particular name cannot be located."""
|
||||
|
||||
class NoForeignKeysError(ArgumentError):
|
||||
"""Raised when no foreign keys can be located between two selectables
|
||||
@@ -172,7 +169,7 @@ class UnboundExecutionError(InvalidRequestError):
|
||||
|
||||
class DontWrapMixin(object):
|
||||
"""A mixin class which, when applied to a user-defined Exception class,
|
||||
will not be wrapped inside of :exc:`.StatementError` if the error is
|
||||
will not be wrapped inside of :class:`.StatementError` if the error is
|
||||
emitted within the process of executing a statement.
|
||||
|
||||
E.g.::
|
||||
@@ -190,6 +187,10 @@ class DontWrapMixin(object):
|
||||
raise MyCustomException("invalid!")
|
||||
|
||||
"""
|
||||
import sys
|
||||
if sys.version_info < (2, 5):
|
||||
class DontWrapMixin:
|
||||
pass
|
||||
|
||||
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
|
||||
UnmappedColumnError = None
|
||||
@@ -222,10 +223,6 @@ class StatementError(SQLAlchemyError):
|
||||
self.statement = statement
|
||||
self.params = params
|
||||
self.orig = orig
|
||||
self.detail = []
|
||||
|
||||
def add_detail(self, msg):
|
||||
self.detail.append(msg)
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (self.args[0], self.statement,
|
||||
@@ -234,13 +231,8 @@ class StatementError(SQLAlchemyError):
|
||||
def __str__(self):
|
||||
from sqlalchemy.sql import util
|
||||
params_repr = util._repr_params(self.params, 10)
|
||||
|
||||
return ' '.join([
|
||||
"(%s)" % det for det in self.detail
|
||||
] + [
|
||||
SQLAlchemyError.__str__(self),
|
||||
repr(self.statement), repr(params_repr)
|
||||
])
|
||||
return ' '.join((SQLAlchemyError.__str__(self),
|
||||
repr(self.statement), repr(params_repr)))
|
||||
|
||||
def __unicode__(self):
|
||||
return self.__str__()
|
||||
@@ -305,7 +297,7 @@ class DBAPIError(StatementError):
|
||||
text = str(orig)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
text = 'Error in str() of DB-API-generated exception: ' + str(e)
|
||||
StatementError.__init__(
|
||||
self,
|
||||
|
@@ -17,7 +17,7 @@ import operator
|
||||
import weakref
|
||||
from .. import exc, orm, util
|
||||
from ..orm import collections, interfaces
|
||||
from ..sql import not_, or_
|
||||
from ..sql import not_
|
||||
|
||||
|
||||
def association_proxy(target_collection, attr, **kw):
|
||||
@@ -231,10 +231,6 @@ class AssociationProxy(interfaces._InspectionAttr):
|
||||
return not self._get_property().\
|
||||
mapper.get_property(self.value_attr).uselist
|
||||
|
||||
@util.memoized_property
|
||||
def _target_is_object(self):
|
||||
return getattr(self.target_class, self.value_attr).impl.uses_objects
|
||||
|
||||
def __get__(self, obj, class_):
|
||||
if self.owning_class is None:
|
||||
self.owning_class = class_ and class_ or type(obj)
|
||||
@@ -242,8 +238,7 @@ class AssociationProxy(interfaces._InspectionAttr):
|
||||
return self
|
||||
|
||||
if self.scalar:
|
||||
target = getattr(obj, self.target_collection)
|
||||
return self._scalar_get(target)
|
||||
return self._scalar_get(getattr(obj, self.target_collection))
|
||||
else:
|
||||
try:
|
||||
# If the owning instance is reborn (orm session resurrect,
|
||||
@@ -288,8 +283,7 @@ class AssociationProxy(interfaces._InspectionAttr):
|
||||
|
||||
def _default_getset(self, collection_class):
|
||||
attr = self.value_attr
|
||||
_getter = operator.attrgetter(attr)
|
||||
getter = lambda target: _getter(target) if target is not None else None
|
||||
getter = operator.attrgetter(attr)
|
||||
if collection_class is dict:
|
||||
setter = lambda o, k, v: setattr(o, attr, v)
|
||||
else:
|
||||
@@ -394,17 +388,10 @@ class AssociationProxy(interfaces._InspectionAttr):
|
||||
|
||||
"""
|
||||
|
||||
if self._target_is_object:
|
||||
return self._comparator.has(
|
||||
return self._comparator.has(
|
||||
getattr(self.target_class, self.value_attr).\
|
||||
has(criterion, **kwargs)
|
||||
)
|
||||
else:
|
||||
if criterion is not None or kwargs:
|
||||
raise exc.ArgumentError(
|
||||
"Non-empty has() not allowed for "
|
||||
"column-targeted association proxy; use ==")
|
||||
return self._comparator.has()
|
||||
|
||||
def contains(self, obj):
|
||||
"""Produce a proxied 'contains' expression using EXISTS.
|
||||
@@ -424,21 +411,10 @@ class AssociationProxy(interfaces._InspectionAttr):
|
||||
return self._comparator.any(**{self.value_attr: obj})
|
||||
|
||||
def __eq__(self, obj):
|
||||
# note the has() here will fail for collections; eq_()
|
||||
# is only allowed with a scalar.
|
||||
if obj is None:
|
||||
return or_(
|
||||
self._comparator.has(**{self.value_attr: obj}),
|
||||
self._comparator == None
|
||||
)
|
||||
else:
|
||||
return self._comparator.has(**{self.value_attr: obj})
|
||||
return self._comparator.has(**{self.value_attr: obj})
|
||||
|
||||
def __ne__(self, obj):
|
||||
# note the has() here will fail for collections; eq_()
|
||||
# is only allowed with a scalar.
|
||||
return self._comparator.has(
|
||||
getattr(self.target_class, self.value_attr) != obj)
|
||||
return not_(self.__eq__(obj))
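# (Added commentary; ``SomeClass.attr = association_proxy(...)`` is a
# hypothetical scalar proxy.)  The 0.8 comparator restored above renders
# ``SomeClass.attr == None`` as a plain EXISTS test against the related
# row, whereas the removed 0.9 version additionally matched rows where
# the related object itself is absent (the ``or_`` branch).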
|
||||
|
||||
|
||||
class _lazy_collection(object):
|
||||
@@ -499,11 +475,9 @@ class _AssociationCollection(object):
|
||||
def __len__(self):
|
||||
return len(self.col)
|
||||
|
||||
def __bool__(self):
|
||||
def __nonzero__(self):
|
||||
return bool(self.col)
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __getstate__(self):
|
||||
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
|
||||
|
||||
@ -540,12 +514,11 @@ class _AssociationList(_AssociationCollection):
|
||||
stop = index.stop
|
||||
step = index.step or 1
|
||||
|
||||
start = index.start or 0
|
||||
rng = list(range(index.start or 0, stop, step))
|
||||
rng = range(index.start or 0, stop, step)
|
||||
if step == 1:
|
||||
for i in rng:
|
||||
del self[start]
|
||||
i = start
|
||||
del self[index.start]
|
||||
i = index.start
|
||||
for item in value:
|
||||
self.insert(i, item)
|
||||
i += 1
|
||||
@ -596,7 +569,7 @@ class _AssociationList(_AssociationCollection):
|
||||
|
||||
def count(self, value):
|
||||
return sum([1 for _ in
|
||||
util.itertools_filter(lambda v: v == value, iter(self))])
|
||||
itertools.ifilter(lambda v: v == value, iter(self))])
|
||||
|
||||
def extend(self, values):
|
||||
for v in values:
|
||||
@ -695,8 +668,8 @@ class _AssociationList(_AssociationCollection):
|
||||
def __hash__(self):
|
||||
raise TypeError("%s objects are unhashable" % type(self).__name__)
|
||||
|
||||
for func_name, func in list(locals().items()):
|
||||
if (util.callable(func) and func.__name__ == func_name and
|
||||
for func_name, func in locals().items():
|
||||
if (util.callable(func) and func.func_name == func_name and
|
||||
not func.__doc__ and hasattr(list, func_name)):
|
||||
func.__doc__ = getattr(list, func_name).__doc__
|
||||
del func_name, func
|
||||
@ -738,7 +711,7 @@ class _AssociationDict(_AssociationCollection):
|
||||
return key in self.col
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.col.keys())
|
||||
return self.col.iterkeys()
|
||||
|
||||
def clear(self):
|
||||
self.col.clear()
|
||||
@ -783,27 +756,24 @@ class _AssociationDict(_AssociationCollection):
|
||||
def keys(self):
|
||||
return self.col.keys()
|
||||
|
||||
if util.py2k:
|
||||
def iteritems(self):
|
||||
return ((key, self._get(self.col[key])) for key in self.col)
|
||||
def iterkeys(self):
|
||||
return self.col.iterkeys()
|
||||
|
||||
def itervalues(self):
|
||||
return (self._get(self.col[key]) for key in self.col)
|
||||
def values(self):
|
||||
return [self._get(member) for member in self.col.values()]
|
||||
|
||||
def iterkeys(self):
|
||||
return self.col.iterkeys()
|
||||
def itervalues(self):
|
||||
for key in self.col:
|
||||
yield self._get(self.col[key])
|
||||
raise StopIteration
|
||||
|
||||
def values(self):
|
||||
return [self._get(member) for member in self.col.values()]
|
||||
def items(self):
|
||||
return [(k, self._get(self.col[k])) for k in self]
|
||||
|
||||
def items(self):
|
||||
return [(k, self._get(self.col[k])) for k in self]
|
||||
else:
|
||||
def items(self):
|
||||
return ((key, self._get(self.col[key])) for key in self.col)
|
||||
|
||||
def values(self):
|
||||
return (self._get(self.col[key]) for key in self.col)
|
||||
def iteritems(self):
|
||||
for key in self.col:
|
||||
yield (key, self._get(self.col[key]))
|
||||
raise StopIteration
|
||||
|
||||
def pop(self, key, default=_NotProvided):
|
||||
if default is _NotProvided:
|
||||
@ -846,8 +816,8 @@ class _AssociationDict(_AssociationCollection):
|
||||
def __hash__(self):
|
||||
raise TypeError("%s objects are unhashable" % type(self).__name__)
|
||||
|
||||
for func_name, func in list(locals().items()):
|
||||
if (util.callable(func) and func.__name__ == func_name and
|
||||
for func_name, func in locals().items():
|
||||
if (util.callable(func) and func.func_name == func_name and
|
||||
not func.__doc__ and hasattr(dict, func_name)):
|
||||
func.__doc__ = getattr(dict, func_name).__doc__
|
||||
del func_name, func
|
||||
@ -868,14 +838,12 @@ class _AssociationSet(_AssociationCollection):
|
||||
def __len__(self):
|
||||
return len(self.col)
|
||||
|
||||
def __bool__(self):
|
||||
def __nonzero__(self):
|
||||
if self.col:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __contains__(self, value):
|
||||
for member in self.col:
|
||||
# testlib.pragma exempt:__eq__
|
||||
@ -1046,8 +1014,8 @@ class _AssociationSet(_AssociationCollection):
|
||||
def __hash__(self):
|
||||
raise TypeError("%s objects are unhashable" % type(self).__name__)
|
||||
|
||||
for func_name, func in list(locals().items()):
|
||||
if (util.callable(func) and func.__name__ == func_name and
|
||||
for func_name, func in locals().items():
|
||||
if (util.callable(func) and func.func_name == func_name and
|
||||
not func.__doc__ and hasattr(set, func_name)):
|
||||
func.__doc__ = getattr(set, func_name).__doc__
|
||||
del func_name, func
|
||||
|
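The ``__bool__``/``__nonzero__`` and dict-view hunks above all trace back to one Python 2/3 split: Python 2 spells the truth hook ``__nonzero__`` and has separate ``iterkeys()``/``iteritems()`` methods, where Python 3 uses ``__bool__`` and view-style ``keys()``/``items()``. A generic sketch of the dual-protocol pattern the 0.9 code relies on, with ``PY2`` standing in for SQLAlchemy's ``util.py2k`` flag::

    import sys

    PY2 = sys.version_info[0] == 2

    class ColWrapper(object):
        def __init__(self, col):
            self.col = col

        def __bool__(self):            # Python 3 truth protocol
            return bool(self.col)

        __nonzero__ = __bool__         # Python 2 spelling of the same hook

        if PY2:
            def iterkeys(self):
                return self.col.iterkeys()

            def items(self):
                return [(k, self.col[k]) for k in self.col]
        else:
            def items(self):
                return ((k, self.col[k]) for k in self.col)
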
@ -1,907 +0,0 @@
# ext/automap.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Define an extension to the :mod:`sqlalchemy.ext.declarative` system
which automatically generates mapped classes and relationships from a database
schema, typically though not necessarily one which is reflected.

.. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`.

.. note::

The :mod:`sqlalchemy.ext.automap` extension should be considered
**experimental** as of 0.9.1. Featureset and API stability is
not guaranteed at this time.

It is hoped that the :class:`.AutomapBase` system provides a quick
and modernized solution to the problem that the very famous
`SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
also tries to solve, that of generating a quick and rudimentary object
model from an existing database on the fly. By addressing the issue strictly
at the mapper configuration level, and integrating fully with existing
Declarative class techniques, :class:`.AutomapBase` seeks to provide
a well-integrated approach to the issue of expediently auto-generating ad-hoc
mappings.


Basic Use
=========

The simplest usage is to reflect an existing database into a new model.
We create a new :class:`.AutomapBase` class in a similar manner as to how
we create a declarative base class, using :func:`.automap_base`.
We then call :meth:`.AutomapBase.prepare` on the resulting base class,
asking it to reflect the schema and produce mappings::

from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine

Base = automap_base()

# engine, suppose it has two tables 'user' and 'address' set up
engine = create_engine("sqlite:///mydatabase.db")

# reflect the tables
Base.prepare(engine, reflect=True)

# mapped classes are now created with names by default
# matching that of the table name.
User = Base.classes.user
Address = Base.classes.address

session = Session(engine)

# rudimentary relationships are produced
session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
session.commit()

# collection-based relationships are by default named "<classname>_collection"
print (u1.address_collection)

Above, calling :meth:`.AutomapBase.prepare` while passing along the
:paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
:meth:`.MetaData.reflect` method will be called on this declarative base
classes' :class:`.MetaData` collection; then, each viable
:class:`.Table` within the :class:`.MetaData` will get a new mapped class
generated automatically. The :class:`.ForeignKeyConstraint` objects which
link the various tables together will be used to produce new, bidirectional
:func:`.relationship` objects between classes. The classes and relationships
follow along a default naming scheme that we can customize. At this point,
our basic mapping consisting of related ``User`` and ``Address`` classes is ready
to use in the traditional way.

Generating Mappings from an Existing MetaData
=============================================

We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
This object can be constructed in any way, including programmatically, from
a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
Below we illustrate a combination of reflection and explicit table declaration::

from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
engine = create_engine("sqlite:///mydatabase.db")

# produce our own MetaData object
metadata = MetaData()

# we can reflect it ourselves from a database, using options
# such as 'only' to limit what tables we look at...
metadata.reflect(engine, only=['user', 'address'])

# ... or just define our own Table objects with it (or combine both)
Table('user_order', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', ForeignKey('user.id'))
)

# we can then produce a set of mappings from this MetaData.
Base = automap_base(metadata=metadata)

# calling prepare() just sets up mapped classes and relationships.
Base.prepare()

# mapped classes are ready
User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order

Specifying Classes Explicitly
=============================

The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
Classes that extend from :class:`.AutomapBase` act like regular declarative
classes, but are not immediately mapped after their construction, and are instead
mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare`
method will make use of the classes we've established based on the table name
we use. If our schema contains tables ``user`` and ``address``, we can define
one or both of the classes to be used::

from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine

# automap base
Base = automap_base()

# pre-declare User for the 'user' table
class User(Base):
__tablename__ = 'user'

# override schema elements like Columns
user_name = Column('name', String)

# override relationships too, if desired.
# we must use the same name that automap would use for the relationship,
# and also must refer to the class name that automap will generate
# for "address"
address_collection = relationship("address", collection_class=set)

# reflect
engine = create_engine("sqlite:///mydatabase.db")
Base.prepare(engine, reflect=True)

# we still have Address generated from the tablename "address",
# but User is the same as Base.classes.User now

Address = Base.classes.address

u1 = session.query(User).first()
print (u1.address_collection)

# the backref is still there:
a1 = session.query(Address).first()
print (a1.user)

Above, one of the more intricate details is that we illustrated overriding
one of the :func:`.relationship` objects that automap would have created.
To do this, we needed to make sure the names match up with what automap
would normally generate, in that the relationship name would be ``User.address_collection``
and the name of the class referred to, from automap's perspective, is called
``address``, even though we are referring to it as ``Address`` within our usage
of this class.

Overriding Naming Schemes
=========================

:mod:`.sqlalchemy.ext.automap` is tasked with producing mapped classes and
relationship names based on a schema, which means it has decision points in how
these names are determined. These three decision points are provided using
functions which can be passed to the :meth:`.AutomapBase.prepare` method, and
are known as :func:`.classname_for_table`,
:func:`.name_for_scalar_relationship`,
and :func:`.name_for_collection_relationship`. Any or all of these
functions are provided as in the example below, where we use a "camel case"
scheme for class names and a "pluralizer" for collection names using the
`Inflect <https://pypi.python.org/pypi/inflect>`_ package::

import re
import inflect

def camelize_classname(base, tablename, table):
"Produce a 'camelized' class name, e.g. "
"'words_and_underscores' -> 'WordsAndUnderscores'"

return str(tablename[0].upper() + \\
re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))

_pluralizer = inflect.engine()
def pluralize_collection(base, local_cls, referred_cls, constraint):
"Produce an 'uncamelized', 'pluralized' class name, e.g. "
"'SomeTerm' -> 'some_terms'"

referred_name = referred_cls.__name__
uncamelized = referred_name[0].lower() + \\
re.sub(r'\W',
lambda m: "_%s" % m.group(0).lower(),
referred_name[1:])
pluralized = _pluralizer.plural(uncamelized)
return pluralized

from sqlalchemy.ext.automap import automap_base

Base = automap_base()

engine = create_engine("sqlite:///mydatabase.db")

Base.prepare(engine, reflect=True,
classname_for_table=camelize_classname,
name_for_collection_relationship=pluralize_collection
)

From the above mapping, we would now have classes ``User`` and ``Address``,
where the collection from ``User`` to ``Address`` is called ``User.addresses``::

User, Address = Base.classes.User, Base.classes.Address

u1 = User(addresses=[Address(email="foo@bar.com")])

Relationship Detection
======================

The vast majority of what automap accomplishes is the generation of
:func:`.relationship` structures based on foreign keys. The mechanism
by which this works for many-to-one and one-to-many relationships is as follows (a brief sketch follows the list):

1. A given :class:`.Table`, known to be mapped to a particular class,
is examined for :class:`.ForeignKeyConstraint` objects.

2. From each :class:`.ForeignKeyConstraint`, the remote :class:`.Table`
object present is matched up to the class to which it is to be mapped,
if any, else it is skipped.

3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a reference
from the immediate mapped class,
the relationship will be set up as a many-to-one referring to the referred class;
a corresponding one-to-many backref will be created on the referred class referring
to this class.

4. The names of the relationships are determined using the
:paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
:paramref:`.AutomapBase.prepare.name_for_collection_relationship`
callable functions. It is important to note that the default relationship
naming derives the name from **the actual class name**. If you've
given a particular class an explicit name by declaring it, or specified an
alternate class naming scheme, that's the name from which the relationship
name will be derived.

5. The classes are inspected for an existing mapped property matching these
names. If one is detected on one side, but none on the other side, :class:`.AutomapBase`
attempts to create a relationship on the missing side, then uses the
:paramref:`.relationship.back_populates` parameter in order to point
the new relationship to the other side.

6. In the usual case where no relationship is on either side,
:meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
side and matches it to the other using the :paramref:`.relationship.backref`
parameter.

7. Production of the :func:`.relationship` and optionally the :func:`.backref`
is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
function, which can be supplied by the end-user in order to augment
the arguments passed to :func:`.relationship` or :func:`.backref` or to
make use of custom implementations of these functions.

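As the sketch promised above: given a hypothetical ``user``/``address`` pair of tables (the names and DDL are illustrative), the steps yield a scalar ``Address.user`` and a collection ``User.address_collection``::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    Base = automap_base()
    engine = create_engine("sqlite://")
    engine.execute("CREATE TABLE user (id INTEGER PRIMARY KEY)")
    engine.execute("CREATE TABLE address (id INTEGER PRIMARY KEY, "
                   "user_id INTEGER REFERENCES user(id))")

    Base.prepare(engine, reflect=True)

    # step 4: the scalar side is named after the referred class...
    assert hasattr(Base.classes.address, "user")
    # ...and the collection side receives the "_collection" suffix
    assert hasattr(Base.classes.user, "address_collection")
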
Custom Relationship Arguments
-----------------------------

The :paramref:`.AutomapBase.prepare.generate_relationship` hook can be used
to add parameters to relationships. For most cases, we can make use of the
existing :func:`.automap.generate_relationship` function to return
the object, after augmenting the given keyword dictionary with our own
arguments.

Below is an illustration of how to send
:paramref:`.relationship.cascade` and
:paramref:`.relationship.passive_deletes`
options along to all one-to-many relationships::

from sqlalchemy.ext.automap import generate_relationship

def _gen_relationship(base, direction, return_fn,
attrname, local_cls, referred_cls, **kw):
if direction is interfaces.ONETOMANY:
kw['cascade'] = 'all, delete-orphan'
kw['passive_deletes'] = True
# make use of the built-in function to actually return
# the result.
return generate_relationship(base, direction, return_fn,
attrname, local_cls, referred_cls, **kw)

from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine

# automap base
Base = automap_base()

engine = create_engine("sqlite:///mydatabase.db")
Base.prepare(engine, reflect=True,
generate_relationship=_gen_relationship)

Many-to-Many relationships
--------------------------

:mod:`.sqlalchemy.ext.automap` will generate many-to-many relationships, e.g.
those which contain a ``secondary`` argument. The process for producing these
is as follows (a sketch of a qualifying table layout follows the list):

1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
before any mapped class has been assigned to it.

2. If the table contains exactly two :class:`.ForeignKeyConstraint`
objects, and all columns within this table are members of these two
:class:`.ForeignKeyConstraint` objects, the table is assumed to be a
"secondary" table, and will **not be mapped directly**.

3. The two (or one, for self-referential) external tables to which the :class:`.Table`
refers are matched to the classes to which they will be mapped, if any.

4. If mapped classes for both sides are located, a many-to-many bi-directional
:func:`.relationship` / :func:`.backref` pair is created between the two
classes.

5. The override logic for many-to-many works the same as that of one-to-many/
many-to-one; the :func:`.generate_relationship` function is called upon
to generate the structures and existing attributes will be maintained.

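As promised above, a sketch of a table layout that satisfies step 2: every column of ``user_keyword`` belongs to one of its exactly two :class:`.ForeignKeyConstraint` objects, so automap would treat it purely as a ``secondary`` table (the names are illustrative)::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    metadata = MetaData()
    Table('user', metadata, Column('id', Integer, primary_key=True))
    Table('keyword', metadata, Column('id', Integer, primary_key=True))

    # both columns are members of the two foreign key constraints,
    # so this table is not mapped to a class of its own
    Table('user_keyword', metadata,
          Column('user_id', ForeignKey('user.id'), primary_key=True),
          Column('keyword_id', ForeignKey('keyword.id'), primary_key=True))
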
Relationships with Inheritance
------------------------------

:mod:`.sqlalchemy.ext.automap` will not generate any relationships between
two classes that are in an inheritance relationship. That is, with two classes
given as follows::

class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
type = Column(String(50))
__mapper_args__ = {
'polymorphic_identity':'employee', 'polymorphic_on': type
}

class Engineer(Employee):
__tablename__ = 'engineer'
id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
__mapper_args__ = {
'polymorphic_identity':'engineer',
}

The foreign key from ``Engineer`` to ``Employee`` is used not for a relationship,
but to establish joined inheritance between the two classes.

Note that this means automap will not generate *any* relationships
for foreign keys that link from a subclass to a superclass. If a mapping
has actual relationships from subclass to superclass as well, those
need to be explicit. Below, as we have two separate foreign keys
from ``Engineer`` to ``Employee``, we need to set up both the relationship
we want as well as the ``inherit_condition``, as these are not things
SQLAlchemy can guess::

class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
type = Column(String(50))

__mapper_args__ = {
'polymorphic_identity':'employee', 'polymorphic_on':type
}

class Engineer(Employee):
__tablename__ = 'engineer'
id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
favorite_employee_id = Column(Integer, ForeignKey('employee.id'))

favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id)

__mapper_args__ = {
'polymorphic_identity':'engineer',
'inherit_condition': id == Employee.id
}


Using Automap with Explicit Declarations
========================================

As noted previously, automap has no dependency on reflection, and can make
use of any collection of :class:`.Table` objects within a :class:`.MetaData`
collection. From this, it follows that automap can also be used to
generate missing relationships given an otherwise complete model that fully defines
table metadata::

from sqlalchemy.ext.automap import automap_base
from sqlalchemy import Column, Integer, String, ForeignKey

Base = automap_base()

class User(Base):
__tablename__ = 'user'

id = Column(Integer, primary_key=True)
name = Column(String)

class Address(Base):
__tablename__ = 'address'

id = Column(Integer, primary_key=True)
email = Column(String)
user_id = Column(ForeignKey('user.id'))

# produce relationships
Base.prepare()

# mapping is complete, with "address_collection" and
# "user" relationships
a1 = Address(email='u1')
a2 = Address(email='u2')
u1 = User(address_collection=[a1, a2])
assert a1.user is u1

Above, given mostly complete ``User`` and ``Address`` mappings, the
:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
bidirectional relationship pair ``Address.user`` and ``User.address_collection``
to be generated on the mapped classes.

Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare`
method is required; if not called, the classes we've declared are in an
un-mapped state.


"""
from .declarative import declarative_base as _declarative_base
from .declarative.base import _DeferredMapperConfig
from ..sql import and_
from ..schema import ForeignKeyConstraint
from ..orm import relationship, backref, interfaces
from .. import util


def classname_for_table(base, tablename, table):
"""Return the class name that should be used, given the name
of a table.

The default implementation is::

return str(tablename)

Alternate implementations can be specified using the
:paramref:`.AutomapBase.prepare.classname_for_table`
parameter.

:param base: the :class:`.AutomapBase` class doing the prepare.

:param tablename: string name of the :class:`.Table`.

:param table: the :class:`.Table` object itself.

:return: a string class name.

.. note::

In Python 2, the string used for the class name **must** be a non-Unicode
object, e.g. a ``str()`` object. The ``.name`` attribute of
:class:`.Table` is typically a Python unicode subclass, so the ``str()``
function should be applied to this name, after accounting for any non-ASCII
characters.

"""
return str(tablename)

def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a scalar object reference.

The default implementation is::

return referred_cls.__name__.lower()

Alternate implementations can be specified using the
:paramref:`.AutomapBase.prepare.name_for_scalar_relationship`
parameter.

:param base: the :class:`.AutomapBase` class doing the prepare.

:param local_cls: the class to be mapped on the local side.

:param referred_cls: the class to be mapped on the referring side.

:param constraint: the :class:`.ForeignKeyConstraint` that is being
inspected to produce this relationship.

"""
return referred_cls.__name__.lower()

def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a collection reference.

The default implementation is::

return referred_cls.__name__.lower() + "_collection"

Alternate implementations
can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
parameter.

:param base: the :class:`.AutomapBase` class doing the prepare.

:param local_cls: the class to be mapped on the local side.

:param referred_cls: the class to be mapped on the referring side.

:param constraint: the :class:`.ForeignKeyConstraint` that is being
inspected to produce this relationship.

"""
return referred_cls.__name__.lower() + "_collection"

def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
"""Generate a :func:`.relationship` or :func:`.backref` on behalf of two
mapped classes.

An alternate implementation of this function can be specified using the
:paramref:`.AutomapBase.prepare.generate_relationship` parameter.

The default implementation of this function is as follows::

if return_fn is backref:
return return_fn(attrname, **kw)
elif return_fn is relationship:
return return_fn(referred_cls, **kw)
else:
raise TypeError("Unknown relationship function: %s" % return_fn)

:param base: the :class:`.AutomapBase` class doing the prepare.

:param direction: indicate the "direction" of the relationship; this will
be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.

:param return_fn: the function that is used by default to create the
relationship. This will be either :func:`.relationship` or :func:`.backref`.
The :func:`.backref` function's result will be used to produce a new
:func:`.relationship` in a second step, so it is critical that user-defined
implementations correctly differentiate between the two functions, if
a custom relationship function is being used.

:param attrname: the attribute name to which this relationship is being assigned.
If the value of :paramref:`.generate_relationship.return_fn` is the
:func:`.backref` function, then this name is the name that is being
assigned to the backref.

:param local_cls: the "local" class to which this relationship or backref
will be locally present.

:param referred_cls: the "referred" class to which the relationship or backref
refers.

:param \**kw: all additional keyword arguments are passed along to the
function.

:return: a :func:`.relationship` or :func:`.backref` construct, as dictated
by the :paramref:`.generate_relationship.return_fn` parameter.

"""
if return_fn is backref:
return return_fn(attrname, **kw)
elif return_fn is relationship:
return return_fn(referred_cls, **kw)
else:
raise TypeError("Unknown relationship function: %s" % return_fn)

class AutomapBase(object):
"""Base class for an "automap" schema.

The :class:`.AutomapBase` class can be compared to the "declarative base"
class that is produced by the :func:`.declarative.declarative_base`
function. In practice, the :class:`.AutomapBase` class is always used
as a mixin along with an actual declarative base.

A new subclassable :class:`.AutomapBase` is typically instantiated
using the :func:`.automap_base` function.

.. seealso::

:ref:`automap_toplevel`

"""
__abstract__ = True

classes = None
"""An instance of :class:`.util.Properties` containing classes.

This object behaves much like the ``.c`` collection on a table. Classes
are present under the name they were given, e.g.::

Base = automap_base()
Base.prepare(engine=some_engine, reflect=True)

User, Address = Base.classes.User, Base.classes.Address

"""

@classmethod
def prepare(cls,
engine=None,
reflect=False,
classname_for_table=classname_for_table,
collection_class=list,
name_for_scalar_relationship=name_for_scalar_relationship,
name_for_collection_relationship=name_for_collection_relationship,
generate_relationship=generate_relationship):

"""Extract mapped classes and relationships from the :class:`.MetaData` and
perform mappings.

:param engine: an :class:`.Engine` or :class:`.Connection` with which
to perform schema reflection, if specified.
If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
object is not used.

:param reflect: if True, the :meth:`.MetaData.reflect` method is called
on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
be used to perform the reflection if present; else, the :class:`.MetaData`
should already be bound to some engine, else the operation will fail.

:param classname_for_table: callable function which will be used to
produce new class names, given a table name. Defaults to
:func:`.classname_for_table`.

:param name_for_scalar_relationship: callable function which will be used
to produce relationship names for scalar relationships. Defaults to
:func:`.name_for_scalar_relationship`.

:param name_for_collection_relationship: callable function which will be used
to produce relationship names for collection-oriented relationships. Defaults to
:func:`.name_for_collection_relationship`.

:param generate_relationship: callable function which will be used to
actually generate :func:`.relationship` and :func:`.backref` constructs.
Defaults to :func:`.generate_relationship`.

:param collection_class: the Python collection class that will be used
when a new :func:`.relationship` object is created that represents a
collection. Defaults to ``list``.

"""
if reflect:
cls.metadata.reflect(
engine,
extend_existing=True,
autoload_replace=False
)

table_to_map_config = dict(
(m.local_table, m)
for m in _DeferredMapperConfig.
classes_for_base(cls, sort=False)
)

many_to_many = []

for table in cls.metadata.tables.values():
lcl_m2m, rem_m2m, m2m_const = _is_many_to_many(cls, table)
if lcl_m2m is not None:
many_to_many.append((lcl_m2m, rem_m2m, m2m_const, table))
elif not table.primary_key:
continue
elif table not in table_to_map_config:
mapped_cls = type(
classname_for_table(cls, table.name, table),
(cls, ),
{"__table__": table}
)
map_config = _DeferredMapperConfig.config_for_cls(mapped_cls)
cls.classes[map_config.cls.__name__] = mapped_cls
table_to_map_config[table] = map_config

for map_config in table_to_map_config.values():
_relationships_for_fks(cls,
map_config,
table_to_map_config,
collection_class,
name_for_scalar_relationship,
name_for_collection_relationship,
generate_relationship)

for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
_m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
table_to_map_config,
collection_class,
name_for_scalar_relationship,
name_for_collection_relationship,
generate_relationship)

for map_config in _DeferredMapperConfig.classes_for_base(cls):
map_config.map()


_sa_decl_prepare = True
"""Indicate that the mapping of classes should be deferred.

The presence of this attribute name indicates to declarative
that the call to mapper() should not occur immediately; instead,
information about the table and attributes to be mapped are gathered
into an internal structure called _DeferredMapperConfig. These
objects can be collected later using classes_for_base(), additional
mapping decisions can be made, and then the map() method will actually
apply the mapping.

The only real reason this deferral of the whole
thing is needed is to support primary key columns that aren't reflected
yet when the class is declared; everything else can theoretically be
added to the mapper later. However, the _DeferredMapperConfig is a
nice interface in any case which exists at that not usually exposed point
at which declarative has the class and the Table but hasn't called
mapper() yet.

"""

def automap_base(declarative_base=None, **kw):
"""Produce a declarative automap base.

This function produces a new base class that is a product of the
:class:`.AutomapBase` class as well as a declarative base produced by
:func:`.declarative.declarative_base`.

All parameters other than ``declarative_base`` are keyword arguments
that are passed directly to the :func:`.declarative.declarative_base`
function.

:param declarative_base: an existing class produced by
:func:`.declarative.declarative_base`. When this is passed, the function
no longer invokes :func:`.declarative.declarative_base` itself, and all other
keyword arguments are ignored.

:param \**kw: keyword arguments are passed along to
:func:`.declarative.declarative_base`.

"""
if declarative_base is None:
Base = _declarative_base(**kw)
else:
Base = declarative_base

return type(
Base.__name__,
(AutomapBase, Base,),
{"__abstract__": True, "classes": util.Properties({})}
)

def _is_many_to_many(automap_base, table):
fk_constraints = [const for const in table.constraints
if isinstance(const, ForeignKeyConstraint)]
if len(fk_constraints) != 2:
return None, None, None

cols = sum(
[[fk.parent for fk in fk_constraint.elements]
for fk_constraint in fk_constraints], [])

if set(cols) != set(table.c):
return None, None, None

return (
fk_constraints[0].elements[0].column.table,
fk_constraints[1].elements[0].column.table,
fk_constraints
)

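For context on ``_is_many_to_many`` above: ``fk.parent`` is the local column and ``fk.column`` the referred column of each constraint element, which is also how the explicit join conditions in ``_m2m_relationship`` below are assembled. A self-contained sketch of that pairing (the table names are illustrative)::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.schema import ForeignKeyConstraint
    from sqlalchemy.sql import and_

    metadata = MetaData()
    Table('user', metadata, Column('id', Integer, primary_key=True))
    assoc = Table('user_keyword', metadata,
                  Column('user_id', ForeignKey('user.id')))

    (constraint,) = [c for c in assoc.constraints
                     if isinstance(c, ForeignKeyConstraint)]
    # fk.column is the referred column (user.id); fk.parent is the
    # local column (user_keyword.user_id)
    cond = and_(*[fk.column == fk.parent for fk in constraint.elements])
    print(cond)  # renders roughly as: user.id = user_keyword.user_id
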
def _relationships_for_fks(automap_base, map_config, table_to_map_config,
collection_class,
name_for_scalar_relationship,
name_for_collection_relationship,
generate_relationship):
local_table = map_config.local_table
local_cls = map_config.cls

if local_table is None:
return
for constraint in local_table.constraints:
if isinstance(constraint, ForeignKeyConstraint):
fks = constraint.elements
referred_table = fks[0].column.table
referred_cfg = table_to_map_config.get(referred_table, None)
if referred_cfg is None:
continue
referred_cls = referred_cfg.cls

if local_cls is not referred_cls and issubclass(local_cls, referred_cls):
continue

relationship_name = name_for_scalar_relationship(
automap_base,
local_cls,
referred_cls, constraint)
backref_name = name_for_collection_relationship(
automap_base,
referred_cls,
local_cls,
constraint
)

create_backref = backref_name not in referred_cfg.properties

if relationship_name not in map_config.properties:
if create_backref:
backref_obj = generate_relationship(automap_base,
interfaces.ONETOMANY, backref,
backref_name, referred_cls, local_cls,
collection_class=collection_class)
else:
backref_obj = None
rel = generate_relationship(automap_base,
interfaces.MANYTOONE,
relationship,
relationship_name,
local_cls, referred_cls,
foreign_keys=[fk.parent for fk in constraint.elements],
backref=backref_obj,
remote_side=[fk.column for fk in constraint.elements]
)
if rel is not None:
map_config.properties[relationship_name] = rel
if not create_backref:
referred_cfg.properties[backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
interfaces.ONETOMANY,
relationship,
backref_name,
referred_cls, local_cls,
foreign_keys=[fk.parent for fk in constraint.elements],
back_populates=relationship_name,
collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
map_config.properties[relationship_name].back_populates = backref_name

def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
table_to_map_config,
collection_class,
name_for_scalar_relationship,
name_for_collection_relationship,
generate_relationship):

map_config = table_to_map_config.get(lcl_m2m, None)
referred_cfg = table_to_map_config.get(rem_m2m, None)
if map_config is None or referred_cfg is None:
return

local_cls = map_config.cls
referred_cls = referred_cfg.cls

relationship_name = name_for_collection_relationship(
automap_base,
local_cls,
referred_cls, m2m_const[0])
backref_name = name_for_collection_relationship(
automap_base,
referred_cls,
local_cls,
m2m_const[1]
)

create_backref = backref_name not in referred_cfg.properties

if relationship_name not in map_config.properties:
if create_backref:
backref_obj = generate_relationship(automap_base,
interfaces.MANYTOMANY,
backref,
backref_name,
referred_cls, local_cls,
collection_class=collection_class
)
else:
backref_obj = None
rel = generate_relationship(automap_base,
interfaces.MANYTOMANY,
relationship,
relationship_name,
local_cls, referred_cls,
secondary=table,
primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
backref=backref_obj,
collection_class=collection_class
)
if rel is not None:
map_config.properties[relationship_name] = rel

if not create_backref:
referred_cfg.properties[backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
interfaces.MANYTOMANY,
relationship,
backref_name,
referred_cls, local_cls,
secondary=table,
primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
back_populates=relationship_name,
collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
map_config.properties[relationship_name].back_populates = backref_name

@ -238,7 +238,7 @@ A synopsis is as follows:
class timestamp(ColumnElement):
type = TIMESTAMP()

* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
* :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a
``ColumnElement`` and a "from clause" like object, and represents a SQL
function or stored procedure type of call. Since most databases support
statements along the line of "SELECT FROM <some function>"

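The bullet above is cut off by the hunk boundary, but the idea it describes is concrete: a :class:`~sqlalchemy.sql.expression.FunctionElement` subclass acts as a column expression while carrying a clause list, and the compiler extension renders it as a function call. A sketch along the lines of the compiler-extension docs (the ``coalesce`` name here is just an example)::

    from sqlalchemy.sql.expression import FunctionElement
    from sqlalchemy.ext.compiler import compiles

    class coalesce(FunctionElement):
        # column-like and clause-bearing at the same time
        name = 'coalesce'

    @compiles(coalesce)
    def visit_coalesce(element, compiler, **kw):
        return "coalesce(%s)" % compiler.process(element.clauses)
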
@ -660,7 +660,7 @@ Using the Concrete Helpers
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Helper classes provide a simpler pattern for concrete inheritance.
With these objects, the ``__declare_first__`` helper is used to configure the
With these objects, the ``__declare_last__`` helper is used to configure the
"polymorphic" loader for the mapper after all subclasses have been declared.

.. versionadded:: 0.7.3
@ -706,26 +706,6 @@ Either ``Employee`` base can be used in the normal fashion::
'concrete':True}


The :class:`.AbstractConcreteBase` class is itself mapped, and can be
used as a target of relationships::

class Company(Base):
__tablename__ = 'company'

id = Column(Integer, primary_key=True)
employees = relationship("Employee",
primaryjoin="Company.id == Employee.company_id")


.. versionchanged:: 0.9.3 Support for use of :class:`.AbstractConcreteBase`
as the target of a :func:`.relationship` has been improved.

It can also be queried directly::

for employee in session.query(Employee).filter(Employee.name == 'qbert'):
print(employee)


.. _declarative_mixins:

Mixin and Custom Base Classes
@ -921,57 +901,11 @@ reference a common target class via many-to-one::
__tablename__ = 'target'
id = Column(Integer, primary_key=True)

Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

:func:`~sqlalchemy.orm.relationship` definitions which require explicit
primaryjoin, order_by etc. expressions should in all but the most
simplistic cases use **late bound** forms
for these arguments, meaning, using either the string form or a lambda.
The reason for this is that the related :class:`.Column` objects which are to
be configured using ``@declared_attr`` are not available to another
``@declared_attr`` attribute; while the methods will work and return new
:class:`.Column` objects, those are not the :class:`.Column` objects that
Declarative will be using as it calls the methods on its own, thus using
*different* :class:`.Column` objects.

The canonical example is the primaryjoin condition that depends upon
another mixed-in column::

class RefTargetMixin(object):
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))

@declared_attr
def target(cls):
return relationship(Target,
primaryjoin=Target.id==cls.target_id # this is *incorrect*
)

Mapping a class using the above mixin, we will get an error like::

sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
yet associated with a Table.

This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
method is not the same :class:`.Column` that declarative is actually going to map
to our table.

The condition above is resolved using a lambda::

class RefTargetMixin(object):
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))

@declared_attr
def target(cls):
return relationship(Target,
primaryjoin=lambda: Target.id==cls.target_id
)

or alternatively, the string form (which ultimately generates a lambda)::
primaryjoin, order_by etc. expressions should use the string forms
for these arguments, so that they are evaluated as late as possible.
To reference the mixin class in these expressions, use the given ``cls``
to get its name::

class RefTargetMixin(object):
@declared_attr
@ -1053,7 +987,7 @@ string values to an implementing class::

Above, the ``HasStringCollection`` mixin produces a :func:`.relationship`
which refers to a newly generated class called ``StringAttribute``. The
``StringAttribute`` class is generated with its own :class:`.Table`
``StringAttribute`` class is generated with it's own :class:`.Table`
definition which is local to the parent class making usage of the
``HasStringCollection`` mixin. It also produces an :func:`.association_proxy`
object which proxies references to the ``strings`` attribute onto the ``value``
@ -1216,20 +1150,6 @@ assumed to be completed and the 'configure' step has finished::

.. versionadded:: 0.7.3

``__declare_first__()``
~~~~~~~~~~~~~~~~~~~~~~~

Like ``__declare_last__()``, but is called at the beginning of mapper configuration
via the :meth:`.MapperEvents.before_configured` event::

class MyClass(Base):
@classmethod
def __declare_first__(cls):
""
# do something before mappings are configured

.. versionadded:: 0.9.3

.. _declarative_abstract:

``__abstract__``
@ -1291,7 +1211,7 @@ Sessions
Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
setup using :class:`~sqlalchemy.orm.scoping.scoped_session` might look like::
setup using :class:`~sqlalchemy.orm.scoped_session` might look like::

engine = create_engine('postgresql://scott:tiger@localhost/test')
Session = scoped_session(sessionmaker(autocommit=False,

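The snippet above is truncated by the hunk boundary; a typical complete ``scoped_session`` setup, sketched from the surrounding docs (the connection URL and keyword values are illustrative)::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine('postgresql://scott:tiger@localhost/test')
    Session = scoped_session(sessionmaker(autocommit=False,
                                          autoflush=False,
                                          bind=engine))
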
@ -9,18 +9,15 @@
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
comparable_property,\
interfaces, properties
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
from ...util import OrderedDict
interfaces
from ...orm.util import polymorphic_union, _mapper_or_none
from ... import exc
import weakref

from .base import _as_declarative, \
_declarative_constructor,\
_DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver
from . import clsregistry
_MapperConfig, _add_attribute


def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
@ -176,16 +173,16 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
of the class.

:param bind: An optional
:class:`~sqlalchemy.engine.Connectable`, will be assigned
the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
:class:`~sqlalchemy.engine.base.Connectable`, will be assigned
the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
instance.

:param metadata:
An optional :class:`~sqlalchemy.schema.MetaData` instance. All
An optional :class:`~sqlalchemy.MetaData` instance. All
:class:`~sqlalchemy.schema.Table` objects implicitly declared by
subclasses of the base will share this MetaData. A MetaData instance
will be created if none is provided. The
:class:`~sqlalchemy.schema.MetaData` instance will be available via the
:class:`~sqlalchemy.MetaData` instance will be available via the
`metadata` attribute of the generated declarative base class.

:param mapper:
@ -288,7 +285,7 @@ class ConcreteBase(object):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :meth:`.after_configured` event.
a hook for the :func:`.MapperEvents.after_configured` event.

:class:`.ConcreteBase` produces a mapped
table for the class itself. Compare to :class:`.AbstractConcreteBase`,
@ -319,13 +316,13 @@ class ConcreteBase(object):

@classmethod
def _create_polymorphic_union(cls, mappers):
return polymorphic_union(OrderedDict(
return polymorphic_union(dict(
(mp.polymorphic_identity, mp.local_table)
for mp in mappers
), 'type', 'pjoin')

@classmethod
def __declare_first__(cls):
def __declare_last__(cls):
m = cls.__mapper__
if m.with_polymorphic:
return
@ -343,7 +340,7 @@ class AbstractConcreteBase(ConcreteBase):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :meth:`.after_configured` event.
a hook for the :func:`.MapperEvents.after_configured` event.

:class:`.AbstractConcreteBase` does not produce a mapped
table for the class itself. Compare to :class:`.ConcreteBase`,
@ -370,11 +367,10 @@ class AbstractConcreteBase(ConcreteBase):
__abstract__ = True

@classmethod
def __declare_first__(cls):
def __declare_last__(cls):
if hasattr(cls, '__mapper__'):
return

clsregistry.add_class(cls.__name__, cls)
# can't rely on 'self_and_descendants' here
# since technically an immediate subclass
# might not be mapped, but a subclass
@ -424,7 +420,7 @@ class DeferredReflection(object):
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
can be reflected and mappings created using
:meth:`.prepare`::
:meth:`.DeferredReflection.prepare`::

engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
@ -468,30 +464,11 @@ class DeferredReflection(object):
def prepare(cls, engine):
"""Reflect all :class:`.Table` objects for all current
:class:`.DeferredReflection` subclasses"""

to_map = _DeferredMapperConfig.classes_for_base(cls)
to_map = [m for m in _MapperConfig.configs.values()
if issubclass(m.cls, cls)]
for thingy in to_map:
cls._sa_decl_prepare(thingy.local_table, engine)
thingy.map()
mapper = thingy.cls.__mapper__
metadata = mapper.class_.metadata
for rel in mapper._props.values():
if isinstance(rel, properties.RelationshipProperty) and \
rel.secondary is not None:
if isinstance(rel.secondary, Table):
cls._reflect_table(rel.secondary, engine)
elif isinstance(rel.secondary, _class_resolver):
rel.secondary._resolvers += (
cls._sa_deferred_table_resolver(engine, metadata),
)

@classmethod
def _sa_deferred_table_resolver(cls, engine, metadata):
def _resolve(key):
t1 = Table(key, metadata)
cls._reflect_table(t1, engine)
return t1
return _resolve

@classmethod
def _sa_decl_prepare(cls, local_table, engine):
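For context, the hunk above swaps the 0.9 body of :meth:`.DeferredReflection.prepare` (driven by ``_DeferredMapperConfig``) for the 0.8 scan of ``_MapperConfig.configs``; the calling pattern is unchanged. A condensed usage sketch (the engine URL and class are assumptions, and the ``foo`` table must already exist in the database)::

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection

    Base = declarative_base()

    class Foo(DeferredReflection, Base):
        __tablename__ = 'foo'
        id = Column(Integer, primary_key=True)   # declared now...

    engine = create_engine('sqlite:///mydatabase.db')
    DeferredReflection.prepare(engine)           # ...reflected and mapped here
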
@ -500,14 +477,10 @@ class DeferredReflection(object):
# will fill in db-loaded columns
# into the existing Table object.
if local_table is not None:
cls._reflect_table(local_table, engine)

@classmethod
def _reflect_table(cls, table, engine):
Table(table.name,
table.metadata,
extend_existing=True,
autoload_replace=False,
autoload=True,
autoload_with=engine,
schema=table.schema)
Table(local_table.name,
local_table.metadata,
extend_existing=True,
autoload_replace=False,
autoload=True,
autoload_with=engine,
schema=local_table.schema)

@ -6,23 +6,20 @@
"""Internal implementation for declarative."""

from ...schema import Table, Column
from ...orm import mapper, class_mapper, synonym
from ...orm import mapper, class_mapper
from ...orm.interfaces import MapperProperty
from ...orm.properties import ColumnProperty, CompositeProperty
from ...orm.attributes import QueryableAttribute
from ...orm.base import _is_mapped_class
from ...orm.util import _is_mapped_class
from ... import util, exc
from ...util import topological
from ...sql import expression
from ... import event
from . import clsregistry
import collections
import weakref


def _declared_mapping_info(cls):
    # deferred mapping
    if _DeferredMapperConfig.has_cls(cls):
        return _DeferredMapperConfig.config_for_cls(cls)
    if cls in _MapperConfig.configs:
        return _MapperConfig.configs[cls]
    # regular mapping
    elif _is_mapped_class(cls):
        return class_mapper(cls, configure=False)
@ -52,10 +49,6 @@ def _as_declarative(cls, classname, dict_):
                @event.listens_for(mapper, "after_configured")
                def go():
                    cls.__declare_last__()
            if '__declare_first__' in base.__dict__:
                @event.listens_for(mapper, "before_configured")
                def go():
                    cls.__declare_first__()
        if '__abstract__' in base.__dict__:
            if (base is cls or
                (base in cls.__bases__ and not _is_declarative_inherits)
@ -155,15 +148,6 @@ def _as_declarative(cls, classname, dict_):
            if isinstance(value, declarative_props):
                value = getattr(cls, k)

            elif isinstance(value, QueryableAttribute) and \
                    value.class_ is not cls and \
                    value.key != k:
                # detect a QueryableAttribute that's already mapped being
                # assigned elsewhere in userland, turn into a synonym()
                value = synonym(value.key)
                setattr(cls, k, value)


        if (isinstance(value, tuple) and len(value) == 1 and
                isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
@ -189,19 +173,15 @@ def _as_declarative(cls, classname, dict_):

    # extract columns from the class dict
    declared_columns = set()
    name_to_prop_key = collections.defaultdict(set)
    for key, c in list(our_stuff.items()):
    for key, c in our_stuff.iteritems():
        if isinstance(c, (ColumnProperty, CompositeProperty)):
            for col in c.columns:
                if isinstance(col, Column) and \
                        col.table is None:
                    _undefer_column_name(key, col)
                    if not isinstance(c, CompositeProperty):
                        name_to_prop_key[col.name].add(key)
                    declared_columns.add(col)
        elif isinstance(c, Column):
            _undefer_column_name(key, c)
            name_to_prop_key[c.name].add(key)
            declared_columns.add(c)
            # if the column is the same name as the key,
            # remove it from the explicit properties dict.
@ -210,15 +190,6 @@ def _as_declarative(cls, classname, dict_):
            # in multi-column ColumnProperties.
            if key == c.key:
                del our_stuff[key]

    for name, keys in name_to_prop_key.items():
        if len(keys) > 1:
            util.warn(
                "On class %r, Column object %r named directly multiple times, "
                "only one will be used: %s" %
                (classname, name, (", ".join(sorted(keys))))
            )

    declared_columns = sorted(
        declared_columns, key=lambda c: c._creation_order)
    table = None
@ -310,24 +281,19 @@ def _as_declarative(cls, classname, dict_):
            inherited_mapped_table is not inherited_table:
        inherited_mapped_table._refresh_for_new_column(c)

    defer_map = hasattr(cls, '_sa_decl_prepare')
    if defer_map:
        cfg_cls = _DeferredMapperConfig
    else:
        cfg_cls = _MapperConfig
    mt = cfg_cls(mapper_cls,
    mt = _MapperConfig(mapper_cls,
                 cls, table,
                 inherits,
                 declared_columns,
                 column_copies,
                 our_stuff,
                 mapper_args_fn)
    if not defer_map:
    if not hasattr(cls, '_sa_decl_prepare'):
        mt.map()


class _MapperConfig(object):

    configs = util.OrderedDict()
    mapped_table = None

    def __init__(self, mapper_cls,
@ -345,7 +311,7 @@ class _MapperConfig(object):
        self.mapper_args_fn = mapper_args_fn
        self.declared_columns = declared_columns
        self.column_copies = column_copies

        self.configs[cls] = self

    def _prepare_mapper_arguments(self):
        properties = self.properties
@ -388,7 +354,7 @@ class _MapperConfig(object):
        # in which case the mapper makes this combination).
        # See if the superclass has a similar column property.
        # If so, join them together.
        for k, col in list(properties.items()):
        for k, col in properties.items():
            if not isinstance(col, expression.ColumnElement):
                continue
            if k in inherited_mapper._props:
@ -402,6 +368,7 @@ class _MapperConfig(object):
        return result_mapper_args

    def map(self):
        self.configs.pop(self.cls, None)
        mapper_args = self._prepare_mapper_arguments()
        self.cls.__mapper__ = self.mapper_cls(
            self.cls,
@ -409,63 +376,6 @@ class _MapperConfig(object):
            **mapper_args
        )

class _DeferredMapperConfig(_MapperConfig):
    _configs = util.OrderedDict()

    @property
    def cls(self):
        return self._cls()

    @cls.setter
    def cls(self, class_):
        self._cls = weakref.ref(class_, self._remove_config_cls)
        self._configs[self._cls] = self

    @classmethod
    def _remove_config_cls(cls, ref):
        cls._configs.pop(ref, None)

    @classmethod
    def has_cls(cls, class_):
        # 2.6 fails on weakref if class_ is an old style class
        return isinstance(class_, type) and \
            weakref.ref(class_) in cls._configs

    @classmethod
    def config_for_cls(cls, class_):
        return cls._configs[weakref.ref(class_)]


    @classmethod
    def classes_for_base(cls, base_cls, sort=True):
        classes_for_base = [m for m in cls._configs.values()
                            if issubclass(m.cls, base_cls)]
        if not sort:
            return classes_for_base

        all_m_by_cls = dict(
            (m.cls, m)
            for m in classes_for_base
        )

        tuples = []
        for m_cls in all_m_by_cls:
            tuples.extend(
                (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
                for base_cls in m_cls.__bases__
                if base_cls in all_m_by_cls
            )
        return list(
            topological.sort(
                tuples,
                classes_for_base
            )
        )

    def map(self):
        self._configs.pop(self._cls, None)
        super(_DeferredMapperConfig, self).map()
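The `_DeferredMapperConfig` removed above keys its registry by `weakref.ref(cls)` so an abandoned class can be garbage collected, with a callback pruning the stale entry. A bare-bones illustration of that pattern (the `Registry` class is hypothetical, not part of the commit):

    import weakref

    class Registry(object):
        _configs = {}

        def __init__(self, class_):
            # the second argument is called with the dead ref on collection
            self._cls = weakref.ref(class_, self._remove)
            self._configs[self._cls] = self

        @classmethod
        def _remove(cls, ref):
            cls._configs.pop(ref, None)

        @classmethod
        def has_cls(cls, class_):
            # refs to the same live object hash and compare equal
            return isinstance(class_, type) and weakref.ref(class_) in cls._configs
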


def _add_attribute(cls, key, value):
    """add an attribute to an existing declarative class.
@ -474,7 +384,6 @@ def _add_attribute(cls, key, value):
    adds it to the Mapper, adds a column to the mapped Table, etc.

    """

    if '__mapper__' in cls.__dict__:
        if isinstance(value, Column):
            _undefer_column_name(key, value)
@ -491,14 +400,6 @@ def _add_attribute(cls, key, value):
                key,
                clsregistry._deferred_relationship(cls, value)
            )
        elif isinstance(value, QueryableAttribute) and value.key != key:
            # detect a QueryableAttribute that's already mapped being
            # assigned elsewhere in userland, turn into a synonym()
            value = synonym(value.key)
            cls.__mapper__.add_property(
                key,
                clsregistry._deferred_relationship(cls, value)
            )
        else:
            type.__setattr__(cls, key, value)
    else:
@ -14,7 +14,6 @@ from ...orm.properties import ColumnProperty, RelationshipProperty, \
from ...schema import _get_table_key
from ...orm import class_mapper, interfaces
from ... import util
from ... import inspection
from ... import exc
import weakref

@ -208,9 +207,6 @@ class _GetColumns(object):
                    " directly to a Column)." % key)
        return getattr(self.cls, key)

inspection._inspects(_GetColumns)(
    lambda target: inspection.inspect(target.cls))


class _GetTable(object):
    def __init__(self, key, metadata):
@ -229,62 +225,47 @@ def _determine_container(key, value):
    return _GetColumns(value)


class _class_resolver(object):
    def __init__(self, cls, prop, fallback, arg):
        self.cls = cls
        self.prop = prop
        self.arg = self._declarative_arg = arg
        self.fallback = fallback
        self._dict = util.PopulateDict(self._access_cls)
        self._resolvers = ()

    def _access_cls(self, key):
        cls = self.cls
        if key in cls._decl_class_registry:
            return _determine_container(key, cls._decl_class_registry[key])
        elif key in cls.metadata.tables:
            return cls.metadata.tables[key]
        elif key in cls.metadata._schemas:
            return _GetTable(key, cls.metadata)
        elif '_sa_module_registry' in cls._decl_class_registry and \
                key in cls._decl_class_registry['_sa_module_registry']:
            registry = cls._decl_class_registry['_sa_module_registry']
            return registry.resolve_attr(key)
        elif self._resolvers:
            for resolv in self._resolvers:
                value = resolv(key)
                if value is not None:
                    return value

        return self.fallback[key]

    def __call__(self):
        try:
            x = eval(self.arg, globals(), self._dict)

            if isinstance(x, _GetColumns):
                return x.cls
            else:
                return x
        except NameError as n:
            raise exc.InvalidRequestError(
                "When initializing mapper %s, expression %r failed to "
                "locate a name (%r). If this is a class name, consider "
                "adding this relationship() to the %r class after "
                "both dependent classes have been defined." %
                (self.prop.parent, self.arg, n.args[0], self.cls)
            )


def _resolver(cls, prop):
    import sqlalchemy
    from sqlalchemy.orm import foreign, remote

    fallback = sqlalchemy.__dict__.copy()
    fallback.update({'foreign': foreign, 'remote': remote})

    def resolve_arg(arg):
        return _class_resolver(cls, prop, fallback, arg)
    import sqlalchemy
    from sqlalchemy.orm import foreign, remote

    fallback = sqlalchemy.__dict__.copy()
    fallback.update({'foreign': foreign, 'remote': remote})

    def access_cls(key):
        if key in cls._decl_class_registry:
            return _determine_container(key, cls._decl_class_registry[key])
        elif key in cls.metadata.tables:
            return cls.metadata.tables[key]
        elif key in cls.metadata._schemas:
            return _GetTable(key, cls.metadata)
        elif '_sa_module_registry' in cls._decl_class_registry and \
                key in cls._decl_class_registry['_sa_module_registry']:
            registry = cls._decl_class_registry['_sa_module_registry']
            return registry.resolve_attr(key)
        else:
            return fallback[key]

    d = util.PopulateDict(access_cls)

    def return_cls():
        try:
            x = eval(arg, globals(), d)

            if isinstance(x, _GetColumns):
                return x.cls
            else:
                return x
        except NameError, n:
            raise exc.InvalidRequestError(
                "When initializing mapper %s, expression %r failed to "
                "locate a name (%r). If this is a class name, consider "
                "adding this relationship() to the %r class after "
                "both dependent classes have been defined." %
                (prop.parent, arg, n.args[0], cls)
            )
    return return_cls
    return resolve_arg
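Both versions above resolve string `relationship()` arguments by running `eval()` against a namespace that consults the declarative class registry first and falls back to the `sqlalchemy` module globals; a lookup that misses everywhere surfaces as `NameError`, which is rewrapped into `InvalidRequestError`. A toy sketch of that lookup chain, with `registry` and `fallback` as stand-ins for the real structures:

    registry = {"Address": object()}   # stand-in for the declarative registry
    fallback = {"desc": object()}      # stand-in for sqlalchemy module globals

    class _Lookup(dict):
        def __missing__(self, key):
            if key in registry:
                return registry[key]
            return fallback[key]   # KeyError here becomes NameError in eval

    def resolve(arg):
        return lambda: eval(arg, globals(), _Lookup())

    # resolve("Address")() returns the registered object; an unknown name
    # raises NameError, which the real code turns into a helpful hint.
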
@ -296,14 +277,14 @@ def _deferred_relationship(cls, prop):
        for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
                     'secondary', '_user_defined_foreign_keys', 'remote_side'):
            v = getattr(prop, attr)
            if isinstance(v, util.string_types):
            if isinstance(v, basestring):
                setattr(prop, attr, resolve_arg(v))

        if prop.backref and isinstance(prop.backref, tuple):
            key, kwargs = prop.backref
            for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
                         'foreign_keys', 'remote_side', 'order_by'):
                if attr in kwargs and isinstance(kwargs[attr], str):
                if attr in kwargs and isinstance(kwargs[attr], basestring):
                    kwargs[attr] = resolve_arg(kwargs[attr])

    return prop

@ -803,6 +803,6 @@ class Comparator(interfaces.PropComparator):
            expr = expr.__clause_element__()
        return expr

    def adapt_to_entity(self, adapt_to_entity):
    def adapted(self, adapter):
        # interesting....
        return self

@ -22,7 +22,7 @@ see the example :ref:`examples_instrumentation`.
:mod:`sqlalchemy.orm.instrumentation` so that it
takes effect, including recognition of
``__sa_instrumentation_manager__`` on mapped classes, as
well :data:`.instrumentation_finders`
well :attr:`.instrumentation_finders`
being used to determine class instrumentation resolution.

"""
@ -31,7 +31,7 @@ from ..orm.instrumentation import (
    ClassManager, InstrumentationFactory, _default_state_getter,
    _default_dict_getter, _default_manager_getter
)
from ..orm import attributes, collections, base as orm_base
from ..orm import attributes, collections
from .. import util
from ..orm import exc as orm_exc
import weakref
@ -399,9 +399,9 @@ def _install_lookups(lookups):
    instance_state = lookups['instance_state']
    instance_dict = lookups['instance_dict']
    manager_of_class = lookups['manager_of_class']
    orm_base.instance_state = attributes.instance_state = \
    attributes.instance_state = \
        orm_instrumentation.instance_state = instance_state
    orm_base.instance_dict = attributes.instance_dict = \
    attributes.instance_dict = \
        orm_instrumentation.instance_dict = instance_dict
    orm_base.manager_of_class = attributes.manager_of_class = \
    attributes.manager_of_class = \
        orm_instrumentation.manager_of_class = manager_of_class

@ -177,7 +177,7 @@ callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
that excludes the :meth:`~MutableBase._parents` collection from the pickle
that excludes the :meth:`~.MutableBase._parents` collection from the pickle
stream::

    class MyMutableType(Mutable):
@ -327,7 +327,7 @@ Supporting Pickling

As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
:meth:`MutableBase._parents` attribute which isn't picklable. If we need to
:meth:`.MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
@ -344,7 +344,7 @@ the minimal form of our ``Point`` class::

As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
:meth:`MutableBase._parents` collection is restored to all ``Point`` objects.
:meth:`.MutableBase._parents` collection is restored to all ``Point`` objects.

"""
from ..orm.attributes import flag_modified
@ -540,7 +540,7 @@ class Mutable(MutableBase):

        To associate a particular mutable type with all occurrences of a
        particular type, use the :meth:`.Mutable.associate_with` classmethod
        of the particular :class:`.Mutable` subclass to establish a global
        of the particular :meth:`.Mutable` subclass to establish a global
        association.

        .. warning::
@ -586,14 +586,14 @@ class MutableComposite(MutableBase):
            setattr(parent, attr_name, value)

def _setup_composite_listener():
    import types
    def _listen_for_type(mapper, class_):
        for prop in mapper.iterate_properties:
            if (hasattr(prop, 'composite_class') and
                    isinstance(prop.composite_class, type) and
                    issubclass(prop.composite_class, MutableComposite)):
            if (hasattr(prop, 'composite_class') and (type(prop.composite_class) in (types.ClassType, types.TypeType)) and
                    issubclass(prop.composite_class, MutableComposite)):
                prop.composite_class._listen_on_attribute(
                    getattr(class_, prop.key), False, class_)
    if not event.contains(Mapper, "mapper_configured", _listen_for_type):
    if not Mapper.dispatch.mapper_configured._contains(Mapper, _listen_for_type):
        event.listen(Mapper, 'mapper_configured', _listen_for_type)
_setup_composite_listener()
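The pickling contract described in the mutable docs above is small enough to show in full: drop the unpicklable `_parents` WeakKeyDictionary on the way out, and let the extension repopulate it when attributes are re-set on load. `MyMutableType` here is a hypothetical minimal form, not the library's implementation:

    class MyMutableType(object):
        def __getstate__(self):
            d = self.__dict__.copy()
            d.pop('_parents', None)   # exclude the weak-key dict from the pickle
            return d

        def __setstate__(self, state):
            self.__dict__ = state     # _parents is rebuilt by the ORM events
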
@ -103,7 +103,7 @@ attribute, so that the ordering is correct when first loaded.
SQLAlchemy's unit of work performs all INSERTs before DELETEs within a
single flush. In the case of a primary key, it will trade
an INSERT/DELETE of the same primary key for an UPDATE statement in order
to lessen the impact of this limitation, however this does not take place
to lessen the impact of this lmitation, however this does not take place
for a UNIQUE column.
A future feature will allow the "DELETE before INSERT" behavior to be
possible, allevating this limitation, though this feature will require
@ -335,7 +335,7 @@ class OrderingList(list):
            if stop < 0:
                stop += len(self)

            for i in range(start, stop, step):
            for i in xrange(start, stop, step):
                self.__setitem__(i, entity[i])
        else:
            self._order_entity(index, entity, True)
@ -345,6 +345,7 @@ class OrderingList(list):
        super(OrderingList, self).__delitem__(index)
        self._reorder()

    # Py2K
    def __setslice__(self, start, end, values):
        super(OrderingList, self).__setslice__(start, end, values)
        self._reorder()
@ -352,12 +353,13 @@ class OrderingList(list):
    def __delslice__(self, start, end):
        super(OrderingList, self).__delslice__(start, end)
        self._reorder()
    # end Py2K

    def __reduce__(self):
        return _reconstitute, (self.__class__, self.__dict__, list(self))

    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name and
            not func.__doc__ and hasattr(list, func_name)):
            func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func
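The class-body loop just above is a docstring-copying idiom: it runs while the class namespace is still a plain dict, stealing each overriding method's docstring from `list`. The 0.9 side uses `func.__name__` and materializes `locals().items()` into a list so it also works on Python 3. A self-contained sketch of the same idiom (`MyList` is illustrative):

    class MyList(list):
        def append(self, value):
            super(MyList, self).append(value)

        # copy list's docstrings onto undocumented overrides, then clean up
        for func_name, func in list(locals().items()):
            if callable(func) and getattr(func, '__name__', None) == func_name and \
                    not func.__doc__ and hasattr(list, func_name):
                func.__doc__ = getattr(list, func_name).__doc__
        del func_name, func
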
@ -58,9 +58,24 @@ from ..orm.interfaces import MapperProperty
from ..orm.attributes import QueryableAttribute
from .. import Table, Column
from ..engine import Engine
from ..util import pickle, byte_buffer, b64encode, b64decode, text_type
from ..util import pickle, text_type
import re
import base64
# Py3K
#from io import BytesIO as byte_buffer
# Py2K
from cStringIO import StringIO as byte_buffer
# end Py2K

# Py3K
#def b64encode(x):
#    return base64.b64encode(x).decode('ascii')
#def b64decode(x):
#    return base64.b64decode(x.encode('ascii'))
# Py2K
b64encode = base64.b64encode
b64decode = base64.b64decode
# end Py2K

__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
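The `# Py2K ... # end Py2K` blocks restored above are markers for SQLAlchemy 0.8's source preprocessor: the Py3K lines ship commented out and are swapped in when building for Python 3. A runtime-conditional equivalent of this particular shim, shown only for orientation (the 0.8 tree does not actually do it this way):

    import sys
    import base64

    if sys.version_info[0] >= 3:
        from io import BytesIO as byte_buffer

        def b64encode(x):
            return base64.b64encode(x).decode('ascii')

        def b64decode(x):
            return base64.b64decode(x.encode('ascii'))
    else:
        from cStringIO import StringIO as byte_buffer
        b64encode = base64.b64encode
        b64decode = base64.b64decode
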
@ -39,11 +39,11 @@ def inspect(subject, raiseerr=True):

    The returned value in some cases may be the
    same object as the one given, such as if a
    :class:`.Mapper` object is passed. In other
    :class:`.orm.Mapper` object is passed. In other
    cases, it will be an instance of the registered
    inspection type for the given object, such as
    if an :class:`.engine.Engine` is passed, an
    :class:`.Inspector` object is returned.
    if a :class:`.engine.Engine` is passed, an
    :class:`.engine.Inspector` object is returned.

    :param subject: the subject to be inspected.
    :param raiseerr: When ``True``, if the given subject
@ -87,6 +87,5 @@ def _inspects(*types):
    return decorate


def _self_inspects(cls):
    _inspects(cls)(True)
    return cls
def _self_inspects(*types):
    _inspects(*types)(True)
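For context on this hunk: `_inspects` associates a type with a resolver callable (or `True`, meaning "return the subject itself"), and `inspect()` walks the subject's MRO to find a registration. A hedged usage sketch, assuming the private `_inspects` API behaves as in this era of the codebase (`Widget` and `_inspect_widget` are hypothetical):

    from sqlalchemy import inspection

    class Widget(object):   # hypothetical, unmapped type
        pass

    @inspection._inspects(Widget)
    def _inspect_widget(subject):
        # whatever inspect(Widget()) should hand back
        return {"widget": subject}
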
@ -21,7 +21,7 @@ import logging
import sys

# set initial level to WARN. This so that
# log statements don't occur in the absence of explicit
# log statements don't occur in the absense of explicit
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger('sqlalchemy')
if rootlogger.level == logging.NOTSET:
@ -38,13 +38,17 @@ def _add_default_handler(logger):
_logged_classes = set()


def class_logger(cls):
def class_logger(cls, enable=False):
    logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
    if enable == 'debug':
        logger.setLevel(logging.DEBUG)
    elif enable == 'info':
        logger.setLevel(logging.INFO)
    cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG)
    cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
    cls.logger = logger
    _logged_classes.add(cls)
    return cls
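A usage sketch for the 0.8-style `class_logger` above: the `enable` flag forces a level at registration time, and the `_should_log_*` lambdas let hot code paths skip expensive log-message formatting. `Engine` here is only a stand-in class:

    class Engine(object):   # stand-in for a real logged class
        pass

    class_logger(Engine, enable='debug')

    e = Engine()
    if e._should_log_debug():               # cheap guard before formatting
        Engine.logger.debug("state: %r", e)
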


class Identified(object):
    logging_name = None
(File diff suppressed because it is too large.)
@ -14,19 +14,109 @@ defines a large part of the ORM's interactivity.

"""

import operator
from operator import itemgetter

from .. import util, event, inspection
from . import interfaces, collections, exc as orm_exc
from . import interfaces, collections, events, exc as orm_exc
from .instrumentation import instance_state, instance_dict, manager_of_class

from .base import instance_state, instance_dict, manager_of_class
orm_util = util.importlater("sqlalchemy.orm", "util")

PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
    """Symbol returned by a loader callable or other attribute/history
    retrieval operation when a value could not be determined, based
    on loader callable flags.
    """
)

ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
    """Symbol returned by a loader callable to indicate the
    retrieved value, or values, were assigned to their attributes
    on the target object.
    """)

ATTR_EMPTY = util.symbol('ATTR_EMPTY',
    """Symbol used internally to indicate an attribute had no callable.
    """)

NO_VALUE = util.symbol('NO_VALUE',
    """Symbol which may be placed as the 'previous' value of an attribute,
    indicating no value was loaded for an attribute when it was modified,
    and flags indicated we were not to load it.
    """
)

NEVER_SET = util.symbol('NEVER_SET',
    """Symbol which may be placed as the 'previous' value of an attribute
    indicating that the attribute had not been assigned to previously.
    """
)

NO_CHANGE = util.symbol("NO_CHANGE",
    """No callables or SQL should be emitted on attribute access
    and no state should change""", canonical=0
)

CALLABLES_OK = util.symbol("CALLABLES_OK",
    """Loader callables can be fired off if a value
    is not present.""", canonical=1
)

SQL_OK = util.symbol("SQL_OK",
    """Loader callables can emit SQL at least on scalar value
    attributes.""", canonical=2)

RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
    """callables can use SQL to load related objects as well
    as scalar value attributes.
    """, canonical=4
)

INIT_OK = util.symbol("INIT_OK",
    """Attributes should be initialized with a blank
    value (None or an empty collection) upon get, if no other
    value can be obtained.
    """, canonical=8
)

NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
    """callables can be emitted if the parent is not persistent.""",
    canonical=16
)

LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
    """callables should use committed values as primary/foreign keys during a load
    """, canonical=32
)

# pre-packaged sets of flags used as inputs
PASSIVE_OFF = util.symbol("PASSIVE_OFF",
    "Callables can be emitted in all cases.",
    canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
               INIT_OK | CALLABLES_OK | SQL_OK)
)
PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
    """PASSIVE_OFF ^ INIT_OK""",
    canonical=PASSIVE_OFF ^ INIT_OK
)
PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
    "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
    canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
)
PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
    "PASSIVE_OFF ^ SQL_OK",
    canonical=PASSIVE_OFF ^ SQL_OK
)
PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
    "PASSIVE_OFF ^ RELATED_OBJECT_OK",
    canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
)
PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
    "PASSIVE_OFF ^ NON_PERSISTENT_OK",
    canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
)

from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
    NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
    INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
    PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
    PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH
from .base import state_str, instance_str
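The `canonical` values above are plain bit flags, so the "passive" arguments threaded through the attribute system compose with ordinary bitwise operators, exactly as `get_history` does with `passive & INIT_OK` / `passive ^= INIT_OK`. A small demonstration with illustrative values (not the library's symbols):

    CALLABLES_OK, SQL_OK, INIT_OK = 1, 2, 8      # illustrative values only
    PASSIVE_OFF = CALLABLES_OK | SQL_OK | INIT_OK

    PASSIVE_RETURN_NEVER_SET = PASSIVE_OFF ^ INIT_OK   # drop one capability
    assert PASSIVE_RETURN_NEVER_SET & INIT_OK == 0     # capability removed
    assert PASSIVE_RETURN_NEVER_SET & SQL_OK           # others unaffected
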
@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
                         interfaces._InspectionAttr,
                         interfaces.PropComparator):
@ -69,6 +159,9 @@ class QueryableAttribute(interfaces._MappedAttribute,
            if key in base:
                self.dispatch._update(base[key].dispatch)

    dispatch = event.dispatcher(events.AttributeEvents)
    dispatch.dispatch_cls._active_history = False

    @util.memoized_property
    def _supports_population(self):
        return self.impl.supports_population
@ -98,7 +191,7 @@ class QueryableAttribute(interfaces._MappedAttribute,
        any other kind of SQL expression other than a :class:`.Column`,
        the attribute will refer to the :attr:`.MapperProperty.info` dictionary
        associated directly with the :class:`.ColumnProperty`, assuming the SQL
        expression itself does not have its own ``.info`` attribute
        expression itself does not have it's own ``.info`` attribute
        (which should be the case, unless a user-defined SQL construct
        has defined one).

@ -143,18 +236,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
    def __clause_element__(self):
        return self.comparator.__clause_element__()

    def _query_clause_element(self):
        """like __clause_element__(), but called specifically
        by :class:`.Query` to allow special behavior."""

        return self.comparator._query_clause_element()

    def adapt_to_entity(self, adapt_to_entity):
        assert not self._of_type
        return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
                              comparator=self.comparator.adapt_to_entity(adapt_to_entity),
                              parententity=adapt_to_entity)

    def of_type(self, cls):
        return QueryableAttribute(
            self.class_,
@ -165,7 +246,7 @@ class QueryableAttribute(interfaces._MappedAttribute,
            of_type=cls)

    def label(self, name):
        return self._query_clause_element().label(name)
        return self.__clause_element__().label(name)

    def operate(self, op, *other, **kwargs):
        return op(self.comparator, *other, **kwargs)
@ -205,6 +286,8 @@ class QueryableAttribute(interfaces._MappedAttribute,
        """
        return self.comparator.property

inspection._self_inspects(QueryableAttribute)


class InstrumentedAttribute(QueryableAttribute):
    """Class bound instrumented attribute which adds basic
@ -252,14 +335,14 @@ def create_proxied_attribute(descriptor):

        def __init__(self, class_, key, descriptor,
                     comparator,
                     adapt_to_entity=None, doc=None,
                     adapter=None, doc=None,
                     original_property=None):
            self.class_ = class_
            self.key = key
            self.descriptor = descriptor
            self.original_property = original_property
            self._comparator = comparator
            self._adapt_to_entity = adapt_to_entity
            self.adapter = adapter
            self.__doc__ = doc

        @property
@ -270,15 +353,18 @@ def create_proxied_attribute(descriptor):
        def comparator(self):
            if util.callable(self._comparator):
                self._comparator = self._comparator()
            if self._adapt_to_entity:
                self._comparator = self._comparator.adapt_to_entity(
                    self._adapt_to_entity)
            if self.adapter:
                self._comparator = self._comparator.adapted(self.adapter)
            return self._comparator

        def adapt_to_entity(self, adapt_to_entity):
            return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
        def adapted(self, adapter):
            """Proxy adapted() for the use case of AliasedClass calling
            adapted.

            """
            return self.__class__(self.class_, self.key, self.descriptor,
                                  self._comparator,
                                  adapt_to_entity)
                                  adapter)

        def __get__(self, instance, owner):
            if instance is None:
@ -300,8 +386,8 @@ def create_proxied_attribute(descriptor):
                return getattr(self.comparator, attribute)
            except AttributeError:
                raise AttributeError(
                    'Neither %r object nor %r object associated with %s '
                    'has an attribute %r' % (
                    'Neither %r object nor %r object associated with %s '
                    'has an attribute %r' % (
                        type(descriptor).__name__,
                        type(self.comparator).__name__,
                        self,
@ -315,53 +401,6 @@ def create_proxied_attribute(descriptor):
            from_instance=descriptor)
    return Proxy
OP_REMOVE = util.symbol("REMOVE")
OP_APPEND = util.symbol("APPEND")
OP_REPLACE = util.symbol("REPLACE")

class Event(object):
    """A token propagated throughout the course of a chain of attribute
    events.

    Serves as an indicator of the source of the event and also provides
    a means of controlling propagation across a chain of attribute
    operations.

    The :class:`.Event` object is sent as the ``initiator`` argument
    when dealing with the :meth:`.AttributeEvents.append`,
    :meth:`.AttributeEvents.set`,
    and :meth:`.AttributeEvents.remove` events.

    The :class:`.Event` object is currently interpreted by the backref
    event handlers, and is used to control the propagation of operations
    across two mutually-dependent attributes.

    .. versionadded:: 0.9.0

    """

    impl = None
    """The :class:`.AttributeImpl` which is the current event initiator.
    """

    op = None
    """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`,
    indicating the source operation.

    """

    def __init__(self, attribute_impl, op):
        self.impl = attribute_impl
        self.op = op
        self.parent_token = self.impl.parent_token


    @property
    def key(self):
        return self.impl.key

    def hasparent(self, state):
        return self.impl.hasparent(state)
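The removed `Event` class carries the `parent_token` handshake that stops the mutual recursion between two backref'ed attributes: an impl that receives its own token back simply returns. A toy version of that handshake, with `Impl` as an illustrative stand-in for `AttributeImpl.set()`:

    class Impl(object):
        def __init__(self, name):
            self.name = name
            self.parent_token = self
            self.other = None

        def set(self, value, initiator=None):
            if initiator is not None and initiator.parent_token is self.parent_token:
                return  # the chain came back to its origin; stop
            print("set %r on %s" % (value, self.name))
            self.other.set(value, initiator or self)

    a, b = Impl("a"), Impl("b")
    a.other = b
    b.other = a
    a.set(42)   # fires once on "a", once on "b", then terminates
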
|
||||
|
||||
class AttributeImpl(object):
    """internal implementation for instrumented attributes."""
@ -370,7 +409,6 @@ class AttributeImpl(object):
                 callable_, dispatch, trackparent=False, extension=None,
                 compare_function=None, active_history=False,
                 parent_token=None, expire_missing=True,
                 send_modified_events=True,
                 **kwargs):
        """Construct an AttributeImpl.

@ -414,10 +452,6 @@ class AttributeImpl(object):
          during state.expire_attributes(None), if no value is present
          for this key.

        send_modified_events
          if False, the InstanceState._modified_event method will have no effect;
          this means the attribute will never show up as changed in a
          history entry.
        """
        self.class_ = class_
        self.key = key
@ -425,7 +459,6 @@ class AttributeImpl(object):
        self.dispatch = dispatch
        self.trackparent = trackparent
        self.parent_token = parent_token or self
        self.send_modified_events = send_modified_events
        if compare_function is None:
            self.is_equal = operator.eq
        else:
@ -504,8 +537,8 @@ class AttributeImpl(object):
                        "but the parent record "
                        "has gone stale, can't be sure this "
                        "is the most recent parent." %
                        (state_str(state),
                         state_str(parent_state),
                        (orm_util.state_str(state),
                         orm_util.state_str(parent_state),
                         self.key))

                return
@ -558,6 +591,7 @@ class AttributeImpl(object):

    def get(self, state, dict_, passive=PASSIVE_OFF):
        """Retrieve a value from the given object.

        If a callable is assembled on this object's attribute, and
        passive is False, the callable will be executed and the
        resulting value will be set as the new value for this attribute.
@ -652,24 +686,19 @@ class ScalarAttributeImpl(AttributeImpl):
        old = dict_.get(self.key, NO_VALUE)

        if self.dispatch.remove:
            self.fire_remove_event(state, dict_, old, self._remove_token)
            self.fire_remove_event(state, dict_, old, None)
        state._modified_event(dict_, self, old)
        del dict_[self.key]

    def get_history(self, state, dict_, passive=PASSIVE_OFF):
        if self.key in dict_:
            return History.from_scalar_attribute(self, state, dict_[self.key])
        else:
            if passive & INIT_OK:
                passive ^= INIT_OK
            current = self.get(state, dict_, passive=passive)
            if current is PASSIVE_NO_RESULT:
                return HISTORY_BLANK
            else:
                return History.from_scalar_attribute(self, state, current)
        return History.from_scalar_attribute(
            self, state, dict_.get(self.key, NO_VALUE))

    def set(self, state, dict_, value, initiator,
            passive=PASSIVE_OFF, check_old=None, pop=False):
        if initiator and initiator.parent_token is self.parent_token:
            return

        if self.dispatch._active_history:
            old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
        else:
@ -681,26 +710,14 @@ class ScalarAttributeImpl(AttributeImpl):
        state._modified_event(dict_, self, old)
        dict_[self.key] = value

    @util.memoized_property
    def _replace_token(self):
        return Event(self, OP_REPLACE)

    @util.memoized_property
    def _append_token(self):
        return Event(self, OP_REPLACE)

    @util.memoized_property
    def _remove_token(self):
        return Event(self, OP_REMOVE)
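The 0.9 side replaces the bare `initiator or self` convention with dedicated, lazily built `Event` tokens per operation, cached by `util.memoized_property`. For reference, a bare-bones equivalent of that descriptor (a sketch, not the library's exact implementation):

    class memoized_property(object):
        """Compute on first access, then shadow the descriptor
        with the value stored in the instance dict."""

        def __init__(self, fget):
            self.fget = fget
            self.__name__ = fget.__name__

        def __get__(self, obj, cls):
            if obj is None:
                return self
            obj.__dict__[self.__name__] = value = self.fget(obj)
            return value
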
    def fire_replace_event(self, state, dict_, value, previous, initiator):
        for fn in self.dispatch.set:
            value = fn(state, value, previous, initiator or self._replace_token)
            value = fn(state, value, previous, initiator or self)
        return value

    def fire_remove_event(self, state, dict_, value, initiator):
        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)
            fn(state, value, initiator or self)

    @property
    def type(self):
@ -722,7 +739,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):

    def delete(self, state, dict_):
        old = self.get(state, dict_)
        self.fire_remove_event(state, dict_, old, self._remove_token)
        self.fire_remove_event(state, dict_, old, self)
        del dict_[self.key]

    def get_history(self, state, dict_, passive=PASSIVE_OFF):
@ -759,9 +776,16 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
            passive=PASSIVE_OFF, check_old=None, pop=False):
        """Set a value on the given InstanceState.

        `initiator` is the ``InstrumentedAttribute`` that initiated the
        ``set()`` operation and is used to control the depth of a circular
        setter operation.

        """
        if initiator and initiator.parent_token is self.parent_token:
            return

        if self.dispatch._active_history:
            old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
            old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
        else:
            old = self.get(state, dict_, passive=PASSIVE_NO_FETCH)

@ -773,20 +797,19 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
            else:
                raise ValueError(
                    "Object %s not associated with %s on attribute '%s'" % (
                        instance_str(check_old),
                        state_str(state),
                        orm_util.instance_str(check_old),
                        orm_util.state_str(state),
                        self.key
                    ))
        value = self.fire_replace_event(state, dict_, value, old, initiator)
        dict_[self.key] = value


    def fire_remove_event(self, state, dict_, value, initiator):
        if self.trackparent and value is not None:
            self.sethasparent(instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)
            fn(state, value, initiator or self)

        state._modified_event(dict_, self, value)

@ -798,7 +821,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
            self.sethasparent(instance_state(previous), state, False)

        for fn in self.dispatch.set:
            value = fn(state, value, previous, initiator or self._replace_token)
            value = fn(state, value, previous, initiator or self)

        state._modified_event(dict_, self, previous)

@ -843,7 +866,7 @@ class CollectionAttributeImpl(AttributeImpl):
        self.collection_factory = typecallable

    def __copy(self, item):
        return [y for y in collections.collection_adapter(item)]
        return [y for y in list(collections.collection_adapter(item))]

    def get_history(self, state, dict_, passive=PASSIVE_OFF):
        current = self.get(state, dict_, passive=passive)
@ -882,17 +905,9 @@ class CollectionAttributeImpl(AttributeImpl):

        return [(instance_state(o), o) for o in current]

    @util.memoized_property
    def _append_token(self):
        return Event(self, OP_APPEND)

    @util.memoized_property
    def _remove_token(self):
        return Event(self, OP_REMOVE)

    def fire_append_event(self, state, dict_, value, initiator):
        for fn in self.dispatch.append:
            value = fn(state, value, initiator or self._append_token)
            value = fn(state, value, initiator or self)

        state._modified_event(dict_, self, NEVER_SET, True)

@ -909,7 +924,7 @@ class CollectionAttributeImpl(AttributeImpl):
            self.sethasparent(instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)
            fn(state, value, initiator or self)

        state._modified_event(dict_, self, NEVER_SET, True)

@ -936,6 +951,8 @@ class CollectionAttributeImpl(AttributeImpl):
            self.key, state, self.collection_factory)

    def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
        if initiator and initiator.parent_token is self.parent_token:
            return
        collection = self.get_collection(state, dict_, passive=passive)
        if collection is PASSIVE_NO_RESULT:
            value = self.fire_append_event(state, dict_, value, initiator)
@ -946,6 +963,9 @@ class CollectionAttributeImpl(AttributeImpl):
            collection.append_with_event(value, initiator)

    def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
        if initiator and initiator.parent_token is self.parent_token:
            return

        collection = self.get_collection(state, state.dict, passive=passive)
        if collection is PASSIVE_NO_RESULT:
            self.fire_remove_event(state, dict_, value, initiator)
@ -968,8 +988,14 @@ class CollectionAttributeImpl(AttributeImpl):
            passive=PASSIVE_OFF, pop=False):
        """Set a value on the given object.

        `initiator` is the ``InstrumentedAttribute`` that initiated the
        ``set()`` operation and is used to control the depth of a circular
        setter operation.
        """

        if initiator and initiator.parent_token is self.parent_token:
            return

        self._set_iterable(
            state, dict_, value,
            lambda adapter, i: adapter.adapt_like_to_iterable(i))
@ -1062,7 +1088,6 @@ def backref_listeners(attribute, key, uselist):
    # use easily recognizable names for stack traces

    parent_token = attribute.impl.parent_token
    parent_impl = attribute.impl

    def _acceptable_key_err(child_state, initiator, child_impl):
        raise ValueError(
@ -1070,7 +1095,7 @@ def backref_listeners(attribute, key, uselist):
            'Passing object %s to attribute "%s" '
            'triggers a modify event on attribute "%s" '
            'via the backref "%s".' % (
                state_str(child_state),
                orm_util.state_str(child_state),
                initiator.parent_token,
                child_impl.parent_token,
                attribute.impl.parent_token
@ -1086,14 +1111,10 @@ def backref_listeners(attribute, key, uselist):
            old_state, old_dict = instance_state(oldchild),\
                instance_dict(oldchild)
            impl = old_state.manager[key].impl

            if initiator.impl is not impl or \
                    initiator.op not in (OP_REPLACE, OP_REMOVE):
                impl.pop(old_state,
                         old_dict,
                         state.obj(),
                         parent_impl._append_token,
                         passive=PASSIVE_NO_FETCH)
            impl.pop(old_state,
                     old_dict,
                     state.obj(),
                     initiator, passive=PASSIVE_NO_FETCH)

        if child is not None:
            child_state, child_dict = instance_state(child),\
@ -1102,14 +1123,12 @@ def backref_listeners(attribute, key, uselist):
            if initiator.parent_token is not parent_token and \
                    initiator.parent_token is not child_impl.parent_token:
                _acceptable_key_err(state, initiator, child_impl)
            elif initiator.impl is not child_impl or \
                    initiator.op not in (OP_APPEND, OP_REPLACE):
                child_impl.append(
                    child_state,
                    child_dict,
                    state.obj(),
                    initiator,
                    passive=PASSIVE_NO_FETCH)
            child_impl.append(
                child_state,
                child_dict,
                state.obj(),
                initiator,
                passive=PASSIVE_NO_FETCH)
        return child

    def emit_backref_from_collection_append_event(state, child, initiator):
@ -1123,9 +1142,7 @@ def backref_listeners(attribute, key, uselist):
        if initiator.parent_token is not parent_token and \
                initiator.parent_token is not child_impl.parent_token:
            _acceptable_key_err(state, initiator, child_impl)
        elif initiator.impl is not child_impl or \
                initiator.op not in (OP_APPEND, OP_REPLACE):
            child_impl.append(
        child_impl.append(
            child_state,
            child_dict,
            state.obj(),
@ -1138,9 +1155,10 @@ def backref_listeners(attribute, key, uselist):
        child_state, child_dict = instance_state(child),\
            instance_dict(child)
        child_impl = child_state.manager[key].impl
        if initiator.impl is not child_impl or \
                initiator.op not in (OP_REMOVE, OP_REPLACE):
            child_impl.pop(
        # can't think of a path that would produce an initiator
        # mismatch here, as it would require an existing collection
        # mismatch.
        child_impl.pop(
            child_state,
            child_dict,
            state.obj(),
@ -1196,9 +1214,8 @@ class History(History):

    """

    def __bool__(self):
    def __nonzero__(self):
        return self != HISTORY_BLANK
    __nonzero__ = __bool__

    def empty(self):
        """Return True if this :class:`.History` has no changes
@ -1253,7 +1270,7 @@ class History(History):
        original = state.committed_state.get(attribute.key, _NO_HISTORY)

        if original is _NO_HISTORY:
            if current is NEVER_SET:
            if current is NO_VALUE:
                return cls((), (), ())
            else:
                return cls((), [current], ())
@ -1270,7 +1287,7 @@ class History(History):
            deleted = ()
        else:
            deleted = [original]
        if current is NEVER_SET:
        if current is NO_VALUE:
            return cls((), (), deleted)
        else:
            return cls([current], (), deleted)
@ -1,457 +0,0 @@
|
||||
# orm/base.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Constants and rudimental functions used throughout the ORM.
|
||||
|
||||
"""
|
||||
|
||||
from .. import util, inspection, exc as sa_exc
|
||||
from ..sql import expression
|
||||
from . import exc
|
||||
import operator
|
||||
|
||||
PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
|
||||
"""Symbol returned by a loader callable or other attribute/history
|
||||
retrieval operation when a value could not be determined, based
|
||||
on loader callable flags.
|
||||
"""
|
||||
)
|
||||
|
||||
ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
|
||||
"""Symbol returned by a loader callable to indicate the
|
||||
retrieved value, or values, were assigned to their attributes
|
||||
on the target object.
|
||||
""")
|
||||
|
||||
ATTR_EMPTY = util.symbol('ATTR_EMPTY',
|
||||
"""Symbol used internally to indicate an attribute had no callable.
|
||||
""")
|
||||
|
||||
NO_VALUE = util.symbol('NO_VALUE',
|
||||
"""Symbol which may be placed as the 'previous' value of an attribute,
|
||||
indicating no value was loaded for an attribute when it was modified,
|
||||
and flags indicated we were not to load it.
|
||||
"""
|
||||
)
|
||||
|
||||
NEVER_SET = util.symbol('NEVER_SET',
|
||||
"""Symbol which may be placed as the 'previous' value of an attribute
|
||||
indicating that the attribute had not been assigned to previously.
|
||||
"""
|
||||
)
|
||||
|
||||
NO_CHANGE = util.symbol("NO_CHANGE",
|
||||
"""No callables or SQL should be emitted on attribute access
|
||||
and no state should change""", canonical=0
|
||||
)
|
||||
|
||||
CALLABLES_OK = util.symbol("CALLABLES_OK",
|
||||
"""Loader callables can be fired off if a value
|
||||
is not present.""", canonical=1
|
||||
)
|
||||
|
||||
SQL_OK = util.symbol("SQL_OK",
|
||||
"""Loader callables can emit SQL at least on scalar value
|
||||
attributes.""", canonical=2)
|
||||
|
||||
RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
|
||||
"""callables can use SQL to load related objects as well
|
||||
as scalar value attributes.
|
||||
""", canonical=4
|
||||
)
|
||||
|
||||
INIT_OK = util.symbol("INIT_OK",
|
||||
"""Attributes should be initialized with a blank
|
||||
value (None or an empty collection) upon get, if no other
|
||||
value can be obtained.
|
||||
""", canonical=8
|
||||
)
|
||||
|
||||
NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
|
||||
"""callables can be emitted if the parent is not persistent.""",
|
||||
canonical=16
|
||||
)
|
||||
|
||||
LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
|
||||
"""callables should use committed values as primary/foreign keys during a load
|
||||
""", canonical=32
|
||||
)
|
||||
|
||||
NO_AUTOFLUSH = util.symbol("NO_AUTOFLUSH",
|
||||
"""loader callables should disable autoflush.
|
||||
""", canonical=64)
|
||||
|
||||
# pre-packaged sets of flags used as inputs
|
||||
PASSIVE_OFF = util.symbol("PASSIVE_OFF",
|
||||
"Callables can be emitted in all cases.",
|
||||
canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
|
||||
INIT_OK | CALLABLES_OK | SQL_OK)
|
||||
)
|
||||
PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
|
||||
"""PASSIVE_OFF ^ INIT_OK""",
|
||||
canonical=PASSIVE_OFF ^ INIT_OK
|
||||
)
|
||||
PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
|
||||
"PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
|
||||
canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
|
||||
)
|
||||
PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
|
||||
"PASSIVE_OFF ^ SQL_OK",
|
||||
canonical=PASSIVE_OFF ^ SQL_OK
|
||||
)
|
||||
PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
|
||||
"PASSIVE_OFF ^ RELATED_OBJECT_OK",
|
||||
canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
|
||||
)
|
||||
PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
|
||||
"PASSIVE_OFF ^ NON_PERSISTENT_OK",
|
||||
canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
|
||||
)
|
||||
|
||||
DEFAULT_MANAGER_ATTR = '_sa_class_manager'
|
||||
DEFAULT_STATE_ATTR = '_sa_instance_state'
|
||||
_INSTRUMENTOR = ('mapper', 'instrumentor')
|
||||
|
||||
EXT_CONTINUE = util.symbol('EXT_CONTINUE')
|
||||
EXT_STOP = util.symbol('EXT_STOP')
|
||||
|
||||
ONETOMANY = util.symbol('ONETOMANY',
|
||||
"""Indicates the one-to-many direction for a :func:`.relationship`.
|
||||
|
||||
This symbol is typically used by the internals but may be exposed within
|
||||
certain API features.
|
||||
|
||||
""")
|
||||
|
||||
MANYTOONE = util.symbol('MANYTOONE',
|
||||
"""Indicates the many-to-one direction for a :func:`.relationship`.
|
||||
|
||||
This symbol is typically used by the internals but may be exposed within
|
||||
certain API features.
|
||||
|
||||
""")
|
||||
|
||||
MANYTOMANY = util.symbol('MANYTOMANY',
|
||||
"""Indicates the many-to-many direction for a :func:`.relationship`.
|
||||
|
||||
This symbol is typically used by the internals but may be exposed within
|
||||
certain API features.
|
||||
|
||||
""")
|
||||
|
||||
NOT_EXTENSION = util.symbol('NOT_EXTENSION',
|
||||
"""Symbol indicating an :class:`_InspectionAttr` that's
|
||||
not part of sqlalchemy.ext.
|
||||
|
||||
Is assigned to the :attr:`._InspectionAttr.extension_type`
|
||||
attibute.
|
||||
|
||||
""")
|
||||
|
||||
_none_set = frozenset([None])
|
||||
|
||||
|
||||
def _generative(*assertions):
|
||||
"""Mark a method as generative, e.g. method-chained."""
|
||||
|
||||
@util.decorator
|
||||
def generate(fn, *args, **kw):
|
||||
self = args[0]._clone()
|
||||
for assertion in assertions:
|
||||
assertion(self, fn.__name__)
|
||||
fn(self, *args[1:], **kw)
|
||||
return self
|
||||
return generate
|
||||
|
||||
|
||||
# these can be replaced by sqlalchemy.ext.instrumentation
|
||||
# if augmented class instrumentation is enabled.
|
||||
def manager_of_class(cls):
|
||||
return cls.__dict__.get(DEFAULT_MANAGER_ATTR, None)
|
||||
|
||||
instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
|
||||
|
||||
instance_dict = operator.attrgetter('__dict__')
|
||||
|
||||
def instance_str(instance):
|
||||
"""Return a string describing an instance."""
|
||||
|
||||
return state_str(instance_state(instance))
|
||||
|
||||
def state_str(state):
|
||||
"""Return a string describing an instance via its InstanceState."""
|
||||
|
||||
if state is None:
|
||||
return "None"
|
||||
else:
|
||||
return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
|
||||
|
||||
def state_class_str(state):
|
||||
"""Return a string describing an instance's class via its InstanceState."""
|
||||
|
||||
if state is None:
|
||||
return "None"
|
||||
else:
|
||||
return '<%s>' % (state.class_.__name__, )
|
||||
|
||||
|
||||
def attribute_str(instance, attribute):
|
||||
return instance_str(instance) + "." + attribute
|
||||
|
||||
|
||||
def state_attribute_str(state, attribute):
|
||||
return state_str(state) + "." + attribute
|
||||
|
||||
def object_mapper(instance):
|
||||
"""Given an object, return the primary Mapper associated with the object
|
||||
instance.
|
||||
|
||||
Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
|
||||
if no mapping is configured.
|
||||
|
||||
This function is available via the inspection system as::
|
||||
|
||||
inspect(instance).mapper
|
||||
|
||||
Using the inspection system will raise
|
||||
:class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
|
||||
not part of a mapping.
|
||||
|
||||
"""
|
||||
return object_state(instance).mapper
|
||||
|
||||
|
||||
def object_state(instance):
|
||||
"""Given an object, return the :class:`.InstanceState`
|
||||
associated with the object.
|
||||
|
||||
Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
|
||||
if no mapping is configured.
|
||||
|
||||
Equivalent functionality is available via the :func:`.inspect`
|
||||
function as::
|
||||
|
||||
inspect(instance)
|
||||
|
||||
Using the inspection system will raise
|
||||
:class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
|
||||
not part of a mapping.
|
||||
|
||||
"""
|
||||
state = _inspect_mapped_object(instance)
|
||||
if state is None:
|
||||
raise exc.UnmappedInstanceError(instance)
|
||||
else:
|
||||
return state
|
||||
|
||||
|
||||
@inspection._inspects(object)
|
||||
def _inspect_mapped_object(instance):
|
||||
try:
|
||||
return instance_state(instance)
|
||||
# TODO: whats the py-2/3 syntax to catch two
|
||||
# different kinds of exceptions at once ?
|
||||
except exc.UnmappedClassError:
|
||||
return None
|
||||
except exc.NO_STATE:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def _class_to_mapper(class_or_mapper):
|
||||
insp = inspection.inspect(class_or_mapper, False)
|
||||
if insp is not None:
|
||||
return insp.mapper
|
||||
else:
|
||||
raise exc.UnmappedClassError(class_or_mapper)
|
||||
|
||||
|
||||
def _mapper_or_none(entity):
|
||||
"""Return the :class:`.Mapper` for the given class or None if the
|
||||
class is not mapped."""
|
||||
|
||||
insp = inspection.inspect(entity, False)
|
||||
if insp is not None:
|
||||
return insp.mapper
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _is_mapped_class(entity):
|
||||
"""Return True if the given object is a mapped class,
|
||||
:class:`.Mapper`, or :class:`.AliasedClass`."""
|
||||
|
||||
insp = inspection.inspect(entity, False)
|
||||
return insp is not None and \
|
||||
hasattr(insp, "mapper") and \
|
||||
(
|
||||
insp.is_mapper
|
||||
or insp.is_aliased_class
|
||||
)
|
||||
|
||||
def _attr_as_key(attr):
|
||||
if hasattr(attr, 'key'):
|
||||
return attr.key
|
||||
else:
|
||||
return expression._column_as_key(attr)
|
||||
|
||||
|
||||
|
||||
def _orm_columns(entity):
|
||||
insp = inspection.inspect(entity, False)
|
||||
if hasattr(insp, 'selectable'):
|
||||
return [c for c in insp.selectable.c]
|
||||
else:
|
||||
return [entity]
|
||||
|
||||
|
||||
|
||||
def _is_aliased_class(entity):
|
||||
insp = inspection.inspect(entity, False)
|
||||
return insp is not None and \
|
||||
getattr(insp, "is_aliased_class", False)
|
||||
|
||||
|
||||
def _entity_descriptor(entity, key):
|
||||
"""Return a class attribute given an entity and string name.
|
||||
|
||||
May return :class:`.InstrumentedAttribute` or user-defined
|
||||
attribute.
|
||||
|
||||
"""
|
||||
insp = inspection.inspect(entity)
|
||||
if insp.is_selectable:
|
||||
description = entity
|
||||
entity = insp.c
|
||||
elif insp.is_aliased_class:
|
||||
entity = insp.entity
|
||||
description = entity
|
||||
elif hasattr(insp, "mapper"):
|
||||
description = entity = insp.mapper.class_
|
||||
else:
|
||||
description = entity
|
||||
|
||||
try:
|
||||
return getattr(entity, key)
|
||||
except AttributeError:
|
||||
raise sa_exc.InvalidRequestError(
|
||||
"Entity '%s' has no property '%s'" %
|
||||
(description, key)
|
||||
)
|
||||
|
||||
_state_mapper = util.dottedgetter('manager.mapper')
|
||||
|
||||
@inspection._inspects(type)
|
||||
def _inspect_mapped_class(class_, configure=False):
|
||||
try:
|
||||
class_manager = manager_of_class(class_)
|
||||
if not class_manager.is_mapped:
|
||||
return None
|
||||
mapper = class_manager.mapper
|
||||
if configure and mapper._new_mappers:
|
||||
mapper._configure_all()
|
||||
return mapper
|
||||
|
||||
except exc.NO_STATE:
|
||||
return None
|
||||
|
||||
def class_mapper(class_, configure=True):
|
||||
"""Given a class, return the primary :class:`.Mapper` associated
|
||||
with the key.
|
||||
|
||||
Raises :exc:`.UnmappedClassError` if no mapping is configured
|
||||
on the given class, or :exc:`.ArgumentError` if a non-class
|
||||
object is passed.
|
||||
|
||||
Equivalent functionality is available via the :func:`.inspect`
|
||||
function as::
|
||||
|
||||
inspect(some_mapped_class)
|
||||
|
||||
Using the inspection system will raise
|
||||
:class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
|
||||
|
||||
"""
|
||||
mapper = _inspect_mapped_class(class_, configure=configure)
|
||||
if mapper is None:
|
||||
if not isinstance(class_, type):
|
||||
raise sa_exc.ArgumentError(
|
||||
"Class object expected, got '%r'." % (class_, ))
|
||||
raise exc.UnmappedClassError(class_)
|
||||
else:
|
||||
return mapper
|
||||
|
||||
|
||||
class _InspectionAttr(object):
    """A base class applied to all ORM objects that can be returned
    by the :func:`.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.

    """

    is_selectable = False
    """Return True if this object is an instance of :class:`.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`.Mapper`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types.  Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`.  But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`._InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`.Mapper.all_orm_descriptors`

    """

    is_clause_element = False
    """True if this object is an instance of :class:`.ClauseElement`."""

    extension_type = NOT_EXTENSION
    """The extension type, if any.
    Defaults to :data:`.interfaces.NOT_EXTENSION`

    .. versionadded:: 0.8.0

    .. seealso::

        :data:`.HYBRID_METHOD`

        :data:`.HYBRID_PROPERTY`

        :data:`.ASSOCIATION_PROXY`

    """


class _MappedAttribute(object):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.

    """
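
[Editor's note] The boolean flags defined on ``_InspectionAttr`` above let
callers branch on inspection results without importing the concrete classes.
A small sketch, reusing the hypothetical ``User`` class from the earlier
note::

    from sqlalchemy import inspect
    from sqlalchemy.orm import aliased

    insp = inspect(User)
    assert insp.is_mapper and not insp.is_aliased_class

    # An aliased entity inspects to an AliasedInsp, which flips the flags.
    insp = inspect(aliased(User))
    assert insp.is_aliased_class and not insp.is_mapper
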
@@ -108,7 +108,8 @@ import weakref

from ..sql import expression
from .. import util, exc as sa_exc
from . import base
orm_util = util.importlater("sqlalchemy.orm", "util")
attributes = util.importlater("sqlalchemy.orm", "attributes")


__all__ = ['collection', 'collection_adapter',

@@ -138,8 +139,8 @@ class _PlainColumnGetter(object):
        return self.cols

    def __call__(self, value):
        state = base.instance_state(value)
        m = base._state_mapper(state)
        state = attributes.instance_state(value)
        m = orm_util._state_mapper(state)

        key = [
            m._get_state_attr_by_column(state, state.dict, col)

@@ -166,8 +167,8 @@ class _SerializableColumnGetter(object):
        return _SerializableColumnGetter, (self.colkeys,)

    def __call__(self, value):
        state = base.instance_state(value)
        m = base._state_mapper(state)
        state = attributes.instance_state(value)
        m = orm_util._state_mapper(state)
        key = [m._get_state_attr_by_column(
            state, state.dict,
            m.mapped_table.columns[k])
@@ -351,7 +352,7 @@ class collection(object):
        promulgation to collection events.

        """
        fn._sa_instrument_role = 'appender'
        setattr(fn, '_sa_instrument_role', 'appender')
        return fn

    @staticmethod

@@ -378,7 +379,7 @@ class collection(object):
        promulgation to collection events.

        """
        fn._sa_instrument_role = 'remover'
        setattr(fn, '_sa_instrument_role', 'remover')
        return fn

    @staticmethod

@@ -392,7 +393,7 @@ class collection(object):
            def __iter__(self): ...

        """
        fn._sa_instrument_role = 'iterator'
        setattr(fn, '_sa_instrument_role', 'iterator')
        return fn

    @staticmethod

@@ -413,7 +414,7 @@ class collection(object):
            def extend(self, items): ...

        """
        fn._sa_instrumented = True
        setattr(fn, '_sa_instrumented', True)
        return fn

    @staticmethod

@@ -427,7 +428,7 @@ class collection(object):
        that has been linked, or None if unlinking.

        """
        fn._sa_instrument_role = 'linker'
        setattr(fn, '_sa_instrument_role', 'linker')
        return fn

    link = linker

@@ -463,7 +464,7 @@ class collection(object):
        validation on the values about to be assigned.

        """
        fn._sa_instrument_role = 'converter'
        setattr(fn, '_sa_instrument_role', 'converter')
        return fn

    @staticmethod

@@ -483,7 +484,7 @@ class collection(object):

        """
        def decorator(fn):
            fn._sa_instrument_before = ('fire_append_event', arg)
            setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
            return fn
        return decorator

@@ -503,8 +504,8 @@ class collection(object):

        """
        def decorator(fn):
            fn._sa_instrument_before = ('fire_append_event', arg)
            fn._sa_instrument_after = 'fire_remove_event'
            setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
            setattr(fn, '_sa_instrument_after', 'fire_remove_event')
            return fn
        return decorator

@@ -525,7 +526,7 @@ class collection(object):

        """
        def decorator(fn):
            fn._sa_instrument_before = ('fire_remove_event', arg)
            setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
            return fn
        return decorator
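
[Editor's note] The hunks above only change how the instrumentation markers
are attached (plain attribute assignment in 0.9 versus ``setattr`` in 0.8);
the public decorator API is identical on both sides. A minimal sketch of a
custom collection class using these decorators — the ``MyBag`` class is an
assumption for illustration::

    from sqlalchemy.orm.collections import collection


    class MyBag(object):
        """Bare-bones collection; decorators mark which methods the ORM
        uses to add, remove and iterate members."""

        def __init__(self):
            self._data = []

        @collection.appender
        def add(self, item):
            self._data.append(item)

        @collection.remover
        def discard(self, item):
            self._data.remove(item)

        @collection.iterator
        def __iter__(self):
            return iter(self._data)
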
@@ -545,13 +546,32 @@ class collection(object):

        """
        def decorator(fn):
            fn._sa_instrument_after = 'fire_remove_event'
            setattr(fn, '_sa_instrument_after', 'fire_remove_event')
            return fn
        return decorator


collection_adapter = operator.attrgetter('_sa_adapter')
"""Fetch the :class:`.CollectionAdapter` for a collection."""
# public instrumentation interface for 'internally instrumented'
# implementations
def collection_adapter(collection):
    """Fetch the :class:`.CollectionAdapter` for a collection."""

    return getattr(collection, '_sa_adapter', None)


def collection_iter(collection):
    """Iterate over an object supporting the @iterator or __iter__ protocols.

    If the collection is an ORM collection, it need not be attached to an
    object to be iterable.

    """
    try:
        return getattr(collection, '_sa_iterator',
                       getattr(collection, '__iter__'))()
    except AttributeError:
        raise TypeError("'%s' object is not iterable" %
                        type(collection).__name__)


class CollectionAdapter(object):
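
[Editor's note] There is a behavioral difference buried in the hunk above:
0.9's ``operator.attrgetter`` raises ``AttributeError`` when handed an object
that was never instrumented, while the 0.8 function restored here quietly
returns ``None``. A two-line illustration::

    from sqlalchemy.orm.collections import collection_adapter

    plain = []                          # an ordinary list, never instrumented
    adapter = collection_adapter(plain) # None under 0.8; AttributeError under 0.9
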
@@ -564,6 +584,8 @@ class CollectionAdapter(object):
    The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
    entity collections.

    The usage of getattr()/setattr() is currently to allow injection
    of custom methods, such as to unwrap Zope security proxies.

    """
    invalidated = False

@@ -587,19 +609,16 @@ class CollectionAdapter(object):
        return self.owner_state.manager[self._key].impl

    def link_to_self(self, data):
        """Link a collection to this adapter"""

        data._sa_adapter = self
        if data._sa_linker:
            data._sa_linker(self)

        """Link a collection to this adapter, and fire a link event."""
        setattr(data, '_sa_adapter', self)
        if hasattr(data, '_sa_linker'):
            getattr(data, '_sa_linker')(self)

    def unlink(self, data):
        """Unlink a collection from any adapter"""

        del data._sa_adapter
        if data._sa_linker:
            data._sa_linker(None)
        """Unlink a collection from any adapter, and fire a link event."""
        setattr(data, '_sa_adapter', None)
        if hasattr(data, '_sa_linker'):
            getattr(data, '_sa_linker')(None)

    def adapt_like_to_iterable(self, obj):
        """Converts collection-compatible objects to an iterable of values.

@@ -615,7 +634,7 @@ class CollectionAdapter(object):
        a default duck-typing-based implementation is used.

        """
        converter = self._data()._sa_converter
        converter = getattr(self._data(), '_sa_converter', None)
        if converter is not None:
            return converter(obj)

@@ -636,66 +655,66 @@ class CollectionAdapter(object):
        # If the object is an adapted collection, return the (iterable)
        # adapter.
        if getattr(obj, '_sa_adapter', None) is not None:
            return obj._sa_adapter
            return getattr(obj, '_sa_adapter')
        elif setting_type == dict:
            if util.py3k:
                return obj.values()
            else:
                return getattr(obj, 'itervalues', obj.values)()
            # Py3K
            #return obj.values()
            # Py2K
            return getattr(obj, 'itervalues', getattr(obj, 'values'))()
            # end Py2K
        else:
            return iter(obj)

    def append_with_event(self, item, initiator=None):
        """Add an entity to the collection, firing mutation events."""

        self._data()._sa_appender(item, _sa_initiator=initiator)
        getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)

    def append_without_event(self, item):
        """Add or restore an entity to the collection, firing no events."""
        self._data()._sa_appender(item, _sa_initiator=False)
        getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)

    def append_multiple_without_event(self, items):
        """Add or restore an entity to the collection, firing no events."""
        appender = self._data()._sa_appender
        appender = getattr(self._data(), '_sa_appender')
        for item in items:
            appender(item, _sa_initiator=False)

    def remove_with_event(self, item, initiator=None):
        """Remove an entity from the collection, firing mutation events."""
        self._data()._sa_remover(item, _sa_initiator=initiator)
        getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)

    def remove_without_event(self, item):
        """Remove an entity from the collection, firing no events."""
        self._data()._sa_remover(item, _sa_initiator=False)
        getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)

    def clear_with_event(self, initiator=None):
        """Empty the collection, firing a mutation event for each entity."""

        remover = self._data()._sa_remover
        remover = getattr(self._data(), '_sa_remover')
        for item in list(self):
            remover(item, _sa_initiator=initiator)

    def clear_without_event(self):
        """Empty the collection, firing no events."""

        remover = self._data()._sa_remover
        remover = getattr(self._data(), '_sa_remover')
        for item in list(self):
            remover(item, _sa_initiator=False)

    def __iter__(self):
        """Iterate over entities in the collection."""

        return iter(self._data()._sa_iterator())
        # Py3K requires iter() here
        return iter(getattr(self._data(), '_sa_iterator')())

    def __len__(self):
        """Count entities in the collection."""
        return len(list(self._data()._sa_iterator()))
        return len(list(getattr(self._data(), '_sa_iterator')()))

    def __bool__(self):
    def __nonzero__(self):
        return True

    __nonzero__ = __bool__

    def fire_append_event(self, item, initiator=None):
        """Notify that a entity has entered the collection.
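
[Editor's note] The ``*_with_event`` / ``*_without_event`` pairs above are the
ORM's internal entry points into a collection; backref and history bookkeeping
only run on the event-firing variants. A hedged sketch of the distinction,
assuming a one-to-many ``parent.children`` relationship is already configured
(internal API — not something application code normally calls)::

    from sqlalchemy.orm.collections import collection_adapter

    adapter = collection_adapter(parent.children)

    # Fires append events: history is recorded, any backref is set.
    adapter.append_with_event(child)

    # Silently restores an item: no events, no history, no backref.
    adapter.append_without_event(other_child)
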
@@ -941,12 +960,7 @@ def _instrument_class(cls):
    for role, method_name in roles.items():
        setattr(cls, '_sa_%s' % role, getattr(cls, method_name))

    cls._sa_adapter = None
    if not hasattr(cls, '_sa_linker'):
        cls._sa_linker = None
    if not hasattr(cls, '_sa_converter'):
        cls._sa_converter = None
    cls._sa_instrumented = id(cls)
    setattr(cls, '_sa_instrumented', id(cls))


def _instrument_membership_mutator(method, before, argument, after):

@@ -985,7 +999,7 @@ def _instrument_membership_mutator(method, before, argument, after):
        if initiator is False:
            executor = None
        else:
            executor = args[0]._sa_adapter
            executor = getattr(args[0], '_sa_adapter', None)

        if before and executor:
            getattr(executor, before)(value, initiator)

@@ -1010,33 +1024,33 @@ def __set(collection, item, _sa_initiator=None):
    """Run set events, may eventually be inlined into decorators."""

    if _sa_initiator is not False:
        executor = collection._sa_adapter
        executor = getattr(collection, '_sa_adapter', None)
        if executor:
            item = executor.fire_append_event(item, _sa_initiator)
            item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
    return item


def __del(collection, item, _sa_initiator=None):
    """Run del events, may eventually be inlined into decorators."""
    if _sa_initiator is not False:
        executor = collection._sa_adapter
        executor = getattr(collection, '_sa_adapter', None)
        if executor:
            executor.fire_remove_event(item, _sa_initiator)
            getattr(executor, 'fire_remove_event')(item, _sa_initiator)


def __before_delete(collection, _sa_initiator=None):
    """Special method to run 'commit existing value' methods"""
    executor = collection._sa_adapter
    executor = getattr(collection, '_sa_adapter', None)
    if executor:
        executor.fire_pre_remove_event(_sa_initiator)
        getattr(executor, 'fire_pre_remove_event')(_sa_initiator)


def _list_decorators():
    """Tailored instrumentation wrappers for any list-like class."""

    def _tidy(fn):
        fn._sa_instrumented = True
        fn.__doc__ = getattr(list, fn.__name__).__doc__
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')

    def append(fn):
        def append(self, item, _sa_initiator=None):

@@ -1083,14 +1097,14 @@ def _list_decorators():
                stop += len(self)

            if step == 1:
                for i in range(start, stop, step):
                for i in xrange(start, stop, step):
                    if len(self) > start:
                        del self[start]

                for i, item in enumerate(value):
                    self.insert(i + start, item)
            else:
                rng = list(range(start, stop, step))
                rng = range(start, stop, step)
                if len(value) != len(rng):
                    raise ValueError(
                        "attempt to assign sequence of size %s to "

@@ -1117,23 +1131,24 @@ def _list_decorators():
        _tidy(__delitem__)
        return __delitem__

    if util.py2k:
        def __setslice__(fn):
            def __setslice__(self, start, end, values):
                for value in self[start:end]:
                    __del(self, value)
                values = [__set(self, value) for value in values]
                fn(self, start, end, values)
            _tidy(__setslice__)
            return __setslice__
    # Py2K
    def __setslice__(fn):
        def __setslice__(self, start, end, values):
            for value in self[start:end]:
                __del(self, value)
            values = [__set(self, value) for value in values]
            fn(self, start, end, values)
        _tidy(__setslice__)
        return __setslice__

        def __delslice__(fn):
            def __delslice__(self, start, end):
                for value in self[start:end]:
                    __del(self, value)
                fn(self, start, end)
            _tidy(__delslice__)
            return __delslice__
    def __delslice__(fn):
        def __delslice__(self, start, end):
            for value in self[start:end]:
                __del(self, value)
            fn(self, start, end)
        _tidy(__delslice__)
        return __delslice__
    # end Py2K

    def extend(fn):
        def extend(self, iterable):

@@ -1161,15 +1176,6 @@ def _list_decorators():
        _tidy(pop)
        return pop

    if not util.py2k:
        def clear(fn):
            def clear(self, index=-1):
                for item in self:
                    __del(self, item)
                fn(self)
            _tidy(clear)
            return clear

    # __imul__ : not wrapping this.  all members of the collection are already
    # present, so no need to fire appends... wrapping it with an explicit
    # decorator is still possible, so events on *= can be had if they're
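
[Editor's note] ``_list_decorators()`` above rewraps each mutating ``list``
method so that ``__set``/``__del`` events fire around the original operation.
A stripped-down, runnable sketch of the wrapping pattern, independent of
SQLAlchemy internals (``fire_append_event`` here is a hypothetical stand-in
for the adapter hook resolved via ``_sa_adapter``)::

    def fire_append_event(collection, item):
        # placeholder: a real ORM collection routes this through its
        # CollectionAdapter, which records history and sets backrefs
        print("append event: %r" % (item,))
        return item


    def instrument_append(original):
        def append(self, item):
            item = fire_append_event(self, item)
            original(self, item)
        append.__doc__ = original.__doc__
        return append


    class EventedList(list):
        pass

    EventedList.append = instrument_append(list.append)
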
@@ -1184,8 +1190,8 @@ def _dict_decorators():
    """Tailored instrumentation wrappers for any dict-like mapping class."""

    def _tidy(fn):
        fn._sa_instrumented = True
        fn.__doc__ = getattr(dict, fn.__name__).__doc__
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')

    Unspecified = util.symbol('Unspecified')

@@ -1248,7 +1254,7 @@ def _dict_decorators():
        def update(self, __other=Unspecified, **kw):
            if __other is not Unspecified:
                if hasattr(__other, 'keys'):
                    for key in list(__other):
                    for key in __other.keys():
                        if (key not in self or
                                self[key] is not __other[key]):
                            self[key] = __other[key]

@@ -1267,7 +1273,11 @@ def _dict_decorators():
    l.pop('Unspecified')
    return l

_set_binop_bases = (set, frozenset)
if util.py3k_warning:
    _set_binop_bases = (set, frozenset)
else:
    import sets
    _set_binop_bases = (set, frozenset, sets.BaseSet)


def _set_binops_check_strict(self, obj):

@@ -1286,8 +1296,8 @@ def _set_decorators():
    """Tailored instrumentation wrappers for any set-like class."""

    def _tidy(fn):
        fn._sa_instrumented = True
        fn.__doc__ = getattr(set, fn.__name__).__doc__
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')

    Unspecified = util.symbol('Unspecified')

@@ -1460,8 +1470,11 @@ __interfaces = {
        ),

    # decorators are required for dicts and object collections.
    dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k
    else ({'iterator': 'itervalues'}, _dict_decorators()),
    # Py3K
    #dict: ({'iterator': 'values'}, _dict_decorators()),
    # Py2K
    dict: ({'iterator': 'itervalues'}, _dict_decorators()),
    # end Py2K
    }
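
[Editor's note] A pattern that recurs throughout this commit: SQLAlchemy 0.9
branches on ``util.py3k`` at runtime, while 0.8 ships Python 2 source
annotated with ``# Py2K`` / ``# end Py2K`` and commented ``# Py3K`` blocks
that a build-time 2to3-based preprocessor toggles when producing the
Python 3 distribution. Roughly, assuming the 0.8 tooling::

    # 0.9 style: one source tree, branch at runtime
    if util.py3k:
        values = obj.values()
    else:
        values = obj.itervalues()

    # 0.8 style: the Py2 branch is live code; the Py3 branch stays a
    # comment until the build step swaps them
    # Py3K
    #values = obj.values()
    # Py2K
    values = obj.itervalues()
    # end Py2K
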
@@ -7,7 +7,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE

@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
    """Base implementation for :class:`.Mapper` event hooks.

@@ -374,7 +374,6 @@ class MapperExtension(object):
        return EXT_CONTINUE


@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):
    """Base implementation for :class:`.Session` event hooks.

@@ -495,7 +494,6 @@ class SessionExtension(object):
    """


@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
    """Base implementation for :class:`.AttributeImpl` event hooks, events
    that fire upon attribute mutations in user code.
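
[Editor's note] The ``dependency_for`` decorators removed above and the
``importlater`` calls added elsewhere in this commit attack the same
circular-import problem from opposite directions: 0.9's ``dependency_for``
pushes a class into another module once that module loads, while 0.8's
``importlater`` defers an import until first attribute access. A sketch of
the 0.8 idiom — the ``make_column_property`` helper is an assumption for
illustration::

    from sqlalchemy import util

    # No import happens yet; 'properties' is a lazy module proxy.
    properties = util.importlater('sqlalchemy.orm', 'properties')


    def make_column_property(*args, **kw):
        # sqlalchemy.orm.properties is actually imported on first
        # attribute access, after all modules have finished loading.
        return properties.ColumnProperty(*args, **kw)
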
@@ -12,11 +12,10 @@ as actively in the load/persist ORM loop.

from .interfaces import MapperProperty, PropComparator
from .util import _none_set
from . import attributes
from . import attributes, strategies
from .. import util, sql, exc as sa_exc, event, schema
from ..sql import expression
from . import properties
from . import query
properties = util.importlater('sqlalchemy.orm', 'properties')


class DescriptorProperty(MapperProperty):

@@ -76,7 +75,6 @@ class DescriptorProperty(MapperProperty):
        mapper.class_manager.instrument_attribute(self.key, proxy_attr)


@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
    """Defines a "composite" mapped attribute, representing a collection
    of columns as one attribute.

@@ -84,64 +82,12 @@ class CompositeProperty(DescriptorProperty):
    :class:`.CompositeProperty` is constructed using the :func:`.composite`
    function.

    .. seealso::
    See also:

        :ref:`mapper_composite`
    :ref:`mapper_composite`

    """
    def __init__(self, class_, *attrs, **kwargs):
        """Return a composite column-based property for use with a Mapper.

        See the mapping documentation section :ref:`mapper_composite` for a full
        usage example.

        The :class:`.MapperProperty` returned by :func:`.composite`
        is the :class:`.CompositeProperty`.

        :param class\_:
          The "composite type" class.

        :param \*cols:
          List of Column objects to be mapped.

        :param active_history=False:
          When ``True``, indicates that the "previous" value for a
          scalar attribute should be loaded when replaced, if not
          already loaded.  See the same flag on :func:`.column_property`.

          .. versionchanged:: 0.7
              This flag specifically becomes meaningful
              - previously it was a placeholder.

        :param group:
          A group name for this property when marked as deferred.

        :param deferred:
          When True, the column property is "deferred", meaning that it does not
          load immediately, and is instead loaded when the attribute is first
          accessed on an instance.  See also :func:`~sqlalchemy.orm.deferred`.

        :param comparator_factory: a class which extends
          :class:`.CompositeProperty.Comparator` which provides custom SQL clause
          generation for comparison operations.

        :param doc:
          optional string that will be applied as the doc on the
          class-bound descriptor.

        :param info: Optional data dictionary which will be populated into the
          :attr:`.MapperProperty.info` attribute of this object.

          .. versionadded:: 0.8

        :param extension:
          an :class:`.AttributeExtension` instance,
          or list of extensions, which will be prepended to the list of
          attribute listeners for the resulting descriptor placed on the class.
          **Deprecated.**  Please see :class:`.AttributeEvents`.

        """

        self.attrs = attrs
        self.composite_class = class_
        self.active_history = kwargs.get('active_history', False)
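
[Editor's note] The parameters documented above are easiest to see in use; a
minimal sketch of :func:`.composite` with a two-column ``Point`` value
object. The ``Point``/``Vertex`` classes are assumptions for illustration,
following the pattern in the SQLAlchemy documentation::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import composite

    Base = declarative_base()


    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __composite_values__(self):
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y


    class Vertex(Base):
        __tablename__ = 'vertex'
        id = Column(Integer, primary_key=True)
        x = Column(Integer)
        y = Column(Integer)

        # two columns exposed as one Point-valued attribute
        start = composite(Point, x, y)
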
@@ -155,7 +101,6 @@ class CompositeProperty(DescriptorProperty):
        util.set_creation_order(self)
        self._create_descriptor()


    def instrument_class(self, mapper):
        super(CompositeProperty, self).instrument_class(mapper)
        self._setup_event_handlers()

@@ -165,6 +110,7 @@ class CompositeProperty(DescriptorProperty):
        has been associated with its parent mapper.

        """
        self._init_props()
        self._setup_arguments_on_columns()

    def _create_descriptor(self):

@@ -235,12 +181,11 @@ class CompositeProperty(DescriptorProperty):
            for prop in self.props
        ]

    @util.memoized_property
    def props(self):
        props = []
    def _init_props(self):
        self.props = props = []
        for attr in self.attrs:
            if isinstance(attr, str):
                prop = self.parent.get_property(attr, _configure_mappers=False)
            if isinstance(attr, basestring):
                prop = self.parent.get_property(attr)
            elif isinstance(attr, schema.Column):
                prop = self.parent._columntoproperty[attr]
            elif isinstance(attr, attributes.InstrumentedAttribute):

@@ -251,7 +196,6 @@ class CompositeProperty(DescriptorProperty):
                    "attributes/attribute names as arguments, got: %r"
                    % (attr,))
            props.append(prop)
        return props

    @property
    def columns(self):

@@ -266,9 +210,7 @@ class CompositeProperty(DescriptorProperty):
            prop.active_history = self.active_history
            if self.deferred:
                prop.deferred = self.deferred
                prop.strategy_class = prop._strategy_lookup(
                    ("deferred", True),
                    ("instrument", True))
                prop.strategy_class = strategies.DeferredColumnLoader
            prop.group = self.group

    def _setup_event_handlers(self):

@@ -363,18 +305,6 @@ class CompositeProperty(DescriptorProperty):
    def _comparator_factory(self, mapper):
        return self.comparator_factory(self, mapper)

    class CompositeBundle(query.Bundle):
        def __init__(self, property, expr):
            self.property = property
            super(CompositeProperty.CompositeBundle, self).__init__(
                property.key, *expr)

        def create_row_processor(self, query, procs, labels):
            def proc(row, result):
                return self.property.composite_class(*[proc(row, result) for proc in procs])
            return proc


    class Comparator(PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.CompositeProperty` attributes.

@@ -394,28 +324,23 @@ class CompositeProperty(DescriptorProperty):

        """

        __hash__ = None

        @property
        def clauses(self):
            return self.__clause_element__()

        def __clause_element__(self):
            return expression.ClauseList(group=False, *self._comparable_elements)

        def _query_clause_element(self):
            return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
        __hash__ = None

        @util.memoized_property
        def _comparable_elements(self):
            if self._adapt_to_entity:
                return [
                    getattr(
                        self._adapt_to_entity.entity,
                        prop.key
                    ) for prop in self.prop._comparable_elements
                ]
            if self.adapter:
                # we need to do a little fudging here because
                # the adapter function we're given only accepts
                # ColumnElements, but our prop._comparable_elements is returning
                # InstrumentedAttribute, because we support the use case
                # of composites that refer to relationships.  The better
                # solution here is to open up how AliasedClass interacts
                # with PropComparators so more context is available.
                return [self.adapter(x.__clause_element__())
                        for x in self.prop._comparable_elements]
            else:
                return self.prop._comparable_elements
@@ -428,7 +353,7 @@ class CompositeProperty(DescriptorProperty):
                a == b
                for a, b in zip(self.prop._comparable_elements, values)
            ]
            if self._adapt_to_entity:
            if self.adapter:
                comparisons = [self.adapter(x) for x in comparisons]
            return sql.and_(*comparisons)

@@ -439,7 +364,6 @@ class CompositeProperty(DescriptorProperty):
        return str(self.parent.class_.__name__) + "." + self.key


@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
    """A 'do nothing' :class:`.MapperProperty` that disables
    an attribute on a concrete subclass that is only present

@@ -488,66 +412,11 @@ class ConcreteInheritedProperty(DescriptorProperty):
            self.descriptor = NoninheritedConcreteProp()


@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):

    def __init__(self, name, map_column=None,
                 descriptor=None, comparator_factory=None,
                 doc=None):
        """Denote an attribute name as a synonym to a mapped property,
        in that the attribute will mirror the value and expression behavior
        of another attribute.

        :param name: the name of the existing mapped property.  This
          can refer to the string name of any :class:`.MapperProperty`
          configured on the class, including column-bound attributes
          and relationships.

        :param descriptor: a Python :term:`descriptor` that will be used
          as a getter (and potentially a setter) when this attribute is
          accessed at the instance level.

        :param map_column: if ``True``, the :func:`.synonym` construct will
          locate the existing named :class:`.MapperProperty` based on the
          attribute name of this :func:`.synonym`, and assign it to a new
          attribute linked to the name of this :func:`.synonym`.
          That is, given a mapping like::

                class MyClass(Base):
                    __tablename__ = 'my_table'

                    id = Column(Integer, primary_key=True)
                    job_status = Column(String(50))

                    job_status = synonym("_job_status", map_column=True)

          The above class ``MyClass`` will now have the ``job_status``
          :class:`.Column` object mapped to the attribute named ``_job_status``,
          and the attribute named ``job_status`` will refer to the synonym
          itself.  This feature is typically used in conjunction with the
          ``descriptor`` argument in order to link a user-defined descriptor
          as a "wrapper" for an existing column.

        :param comparator_factory: A subclass of :class:`.PropComparator`
          that will provide custom comparison behavior at the SQL expression
          level.

          .. note::

            For the use case of providing an attribute which redefines both
            Python-level and SQL-expression level behavior of an attribute,
            please refer to the Hybrid attribute introduced at
            :ref:`mapper_hybrids` for a more effective technique.

        .. seealso::

            :ref:`synonyms` - examples of functionality.

            :ref:`mapper_hybrids` - Hybrids provide a better approach for
            more complicated attribute-wrapping schemes than synonyms.

        """

        self.name = name
        self.map_column = map_column
        self.descriptor = descriptor
@@ -601,72 +470,10 @@ class SynonymProperty(DescriptorProperty):
        self.parent = parent


@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ComparableProperty(DescriptorProperty):
    """Instruments a Python property for use in query expressions."""

    def __init__(self, comparator_factory, descriptor=None, doc=None):
        """Provides a method of applying a :class:`.PropComparator`
        to any Python descriptor attribute.

        .. versionchanged:: 0.7
            :func:`.comparable_property` is superseded by
            the :mod:`~sqlalchemy.ext.hybrid` extension.  See the example
            at :ref:`hybrid_custom_comparators`.

        Allows any Python descriptor to behave like a SQL-enabled
        attribute when used at the class level in queries, allowing
        redefinition of expression operator behavior.

        In the example below we redefine :meth:`.PropComparator.operate`
        to wrap both sides of an expression in ``func.lower()`` to produce
        case-insensitive comparison::

            from sqlalchemy.orm import comparable_property
            from sqlalchemy.orm.interfaces import PropComparator
            from sqlalchemy.sql import func
            from sqlalchemy import Integer, String, Column
            from sqlalchemy.ext.declarative import declarative_base

            class CaseInsensitiveComparator(PropComparator):
                def __clause_element__(self):
                    return self.prop

                def operate(self, op, other):
                    return op(
                        func.lower(self.__clause_element__()),
                        func.lower(other)
                    )

            Base = declarative_base()

            class SearchWord(Base):
                __tablename__ = 'search_word'
                id = Column(Integer, primary_key=True)
                word = Column(String)
                word_insensitive = comparable_property(lambda prop, mapper:
                                CaseInsensitiveComparator(mapper.c.word, mapper)
                            )


        A mapping like the above allows the ``word_insensitive`` attribute
        to render an expression like::

            >>> print SearchWord.word_insensitive == "Trucks"
            lower(search_word.word) = lower(:lower_1)

        :param comparator_factory:
          A PropComparator subclass or factory that defines operator behavior
          for this property.

        :param descriptor:
          Optional when used in a ``properties={}`` declaration.  The Python
          descriptor or property to layer comparison behavior on top of.

          The like-named descriptor will be automatically retrieved from the
          mapped class if left blank in a ``properties`` declaration.

        """
        self.descriptor = descriptor
        self.comparator_factory = comparator_factory
        self.doc = doc or (descriptor and descriptor.__doc__) or None

@@ -674,5 +481,3 @@ class ComparableProperty(DescriptorProperty):

    def _comparator_factory(self, mapper):
        return self.comparator_factory(self, mapper)
@@ -15,12 +15,11 @@ from .. import log, util, exc
from ..sql import operators
from . import (
    attributes, object_session, util as orm_util, strategies,
    object_mapper, exc as orm_exc, properties
    object_mapper, exc as orm_exc
    )
from .query import Query

@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
    def init_class_attribute(self, mapper):
        self.is_class_level = True

@@ -40,6 +39,9 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
            backref=self.parent_property.back_populates,
        )

log.class_logger(DynaLoader)


class DynamicAttributeImpl(attributes.AttributeImpl):
    uses_objects = True
    accepts_scalar_loader = False

@@ -76,14 +78,6 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
        history = self._get_collection_history(state, passive)
        return history.added_plus_unchanged

    @util.memoized_property
    def _append_token(self):
        return attributes.Event(self, attributes.OP_APPEND)

    @util.memoized_property
    def _remove_token(self):
        return attributes.Event(self, attributes.OP_REMOVE)

    def fire_append_event(self, state, dict_, value, initiator,
                          collection_history=None):
        if collection_history is None:

@@ -92,7 +86,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
        collection_history.add_added(value)

        for fn in self.dispatch.append:
            value = fn(state, value, initiator or self._append_token)
            value = fn(state, value, initiator or self)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, True)

@@ -108,7 +102,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
            self.sethasparent(attributes.instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)
            fn(state, value, initiator or self)

    def _modified_event(self, state, dict_):
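
[Editor's note] ``DynaLoader`` above is the loader strategy behind
``lazy='dynamic'`` relationships; only its internal registration differs
between the two branches, not how user code selects it. A sketch of the
user-facing feature — the ``Parent``/``Child`` classes are assumptions for
illustration::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()


    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)

        # lazy='dynamic' selects DynaLoader: the attribute returns a
        # Query that can be filtered before any rows are loaded, e.g.
        # parent.children.filter(Child.id > 10).count()
        children = relationship('Child', lazy='dynamic')


    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))
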
@@ -13,7 +13,9 @@ class UnevaluatableError(Exception):

_straight_ops = set(getattr(operators, op)
                    for op in ('add', 'mul', 'sub',
                               # Py2K
                               'div',
                               # end Py2K
                               'mod', 'truediv',
                               'lt', 'le', 'ne', 'gt', 'ge', 'eq'))

@@ -54,7 +56,7 @@ class EvaluatorCompiler(object):
        return lambda obj: get_corresponding_attr(obj)

    def visit_clauselist(self, clause):
        evaluators = list(map(self.process, clause.clauses))
        evaluators = map(self.process, clause.clauses)
        if clause.operator is operators.or_:
            def evaluate(obj):
                has_null = False

@@ -83,8 +85,8 @@ class EvaluatorCompiler(object):
        return evaluate

    def visit_binary(self, clause):
        eval_left, eval_right = list(map(self.process,
                                         [clause.left, clause.right]))
        eval_left, eval_right = map(self.process,
                                    [clause.left, clause.right])
        operator = clause.operator
        if operator is operators.is_:
            def evaluate(obj):
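
[Editor's note] The ``list(map(...))`` versus bare ``map(...)`` swap above is
another Python 2 artifact: under Python 2, ``map()`` returns a list that can
be iterated repeatedly, while under Python 3 it returns a one-shot iterator,
so the 0.9 code materializes it explicitly. A short illustration::

    evaluators = map(len, ['ab', 'cde'])   # Py2: [2, 3]; Py3: lazy iterator
    first_pass = list(evaluators)          # [2, 3] on both
    second_pass = list(evaluators)         # Py3: [] -- already exhausted
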
@@ -8,14 +8,10 @@

"""
from .. import event, exc, util
from .base import _mapper_or_none
orm = util.importlater("sqlalchemy", "orm")
import inspect
import weakref
from . import interfaces
from . import mapperlib, instrumentation
from .session import Session, sessionmaker
from .scoping import scoped_session
from .attributes import QueryableAttribute


class InstrumentationEvents(event.Events):
    """Events related to class instrumentation events.

@@ -46,21 +42,16 @@ class InstrumentationEvents(event.Events):

    """

    _target_class_doc = "SomeBaseClass"
    _dispatch_target = instrumentation.InstrumentationFactory

    @classmethod
    def _accept_with(cls, target):
        # TODO: there's no coverage for this
        if isinstance(target, type):
            return _InstrumentationEventsHold(target)
        else:
            return None

    @classmethod
    def _listen(cls, event_key, propagate=True, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn
    def _listen(cls, target, identifier, fn, propagate=True):

        def listen(target_cls, *arg):
            listen_cls = target()

@@ -70,21 +61,22 @@ class InstrumentationEvents(event.Events):
                return fn(target_cls, *arg)

        def remove(ref):
            key = event.registry._EventKey(None, identifier, listen,
                                           instrumentation._instrumentation_factory)
            getattr(instrumentation._instrumentation_factory.dispatch,
                    identifier).remove(key)
            event.Events._remove(orm.instrumentation._instrumentation_factory,
                                 identifier, listen)

        target = weakref.ref(target.class_, remove)
        event.Events._listen(orm.instrumentation._instrumentation_factory,
                             identifier, listen)

        event_key.\
            with_dispatch_target(instrumentation._instrumentation_factory).\
            with_wrapper(listen).base_listen(**kw)

    @classmethod
    def _remove(cls, identifier, target, fn):
        raise NotImplementedError("Removal of instrumentation events "
                                  "not yet implemented")

    @classmethod
    def _clear(cls):
        super(InstrumentationEvents, cls)._clear()
        instrumentation._instrumentation_factory.dispatch._clear()
        orm.instrumentation._instrumentation_factory.dispatch._clear()

    def class_instrument(self, cls):
        """Called after the given class is instrumented.

@@ -106,7 +98,6 @@ class InstrumentationEvents(event.Events):
        """Called when an attribute is instrumented."""
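
[Editor's note] A hedged usage sketch for the events above: instrumentation
events are registered against a (base) class and fire as mapped classes are
set up. This assumes the declarative ``Base`` from the earlier sketches::

    from sqlalchemy import event


    def on_class_instrument(cls):
        print("instrumented: %s" % cls.__name__)

    event.listen(Base, "class_instrument", on_class_instrument)
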
class _InstrumentationEventsHold(object):
    """temporary marker object used to transfer from _accept_with() to
    _listen() on the InstrumentationEvents class.

@@ -117,6 +108,7 @@ class _InstrumentationEventsHold(object):

    dispatch = event.dispatcher(InstrumentationEvents)


class InstanceEvents(event.Events):
    """Define events specific to object lifecycle.

@@ -157,29 +149,19 @@ class InstanceEvents(event.Events):
    object, rather than the mapped instance itself.

    """

    _target_class_doc = "SomeClass"

    _dispatch_target = instrumentation.ClassManager

    @classmethod
    def _new_classmanager_instance(cls, class_, classmanager):
        _InstanceEventsHold.populate(class_, classmanager)

    @classmethod
    @util.dependencies("sqlalchemy.orm")
    def _accept_with(cls, orm, target):
        if isinstance(target, instrumentation.ClassManager):
    def _accept_with(cls, target):
        if isinstance(target, orm.instrumentation.ClassManager):
            return target
        elif isinstance(target, mapperlib.Mapper):
        elif isinstance(target, orm.Mapper):
            return target.class_manager
        elif target is orm.mapper:
            return instrumentation.ClassManager
            return orm.instrumentation.ClassManager
        elif isinstance(target, type):
            if issubclass(target, mapperlib.Mapper):
                return instrumentation.ClassManager
            if issubclass(target, orm.Mapper):
                return orm.instrumentation.ClassManager
            else:
                manager = instrumentation.manager_of_class(target)
                manager = orm.instrumentation.manager_of_class(target)
                if manager:
                    return manager
                else:

@@ -187,20 +169,23 @@ class InstanceEvents(event.Events):
        return None

    @classmethod
    def _listen(cls, event_key, raw=False, propagate=False, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn

    def _listen(cls, target, identifier, fn, raw=False, propagate=False):
        if not raw:
            orig_fn = fn

            def wrap(state, *arg, **kw):
                return fn(state.obj(), *arg, **kw)
            event_key = event_key.with_wrapper(wrap)

        event_key.base_listen(propagate=propagate, **kw)
                return orig_fn(state.obj(), *arg, **kw)
            fn = wrap

        event.Events._listen(target, identifier, fn, propagate=propagate)
        if propagate:
            for mgr in target.subclass_managers(True):
                event_key.with_dispatch_target(mgr).base_listen(propagate=True)
                event.Events._listen(mgr, identifier, fn, True)

    @classmethod
    def _remove(cls, identifier, target, fn):
        msg = "Removal of instance events not yet implemented"
        raise NotImplementedError(msg)

    @classmethod
    def _clear(cls):

@@ -329,7 +314,8 @@ class InstanceEvents(event.Events):

        """

class _EventsHold(event.RefCollection):

class _EventsHold(object):
    """Hold onto listeners against unmapped, uninstrumented classes.

    Establish _listen() for that class' mapper/instrumentation when

@@ -344,20 +330,14 @@ class _EventsHold(event.RefCollection):
        cls.all_holds.clear()

    class HoldEvents(object):
        _dispatch_target = None

        @classmethod
        def _listen(cls, event_key, raw=False, propagate=False, **kw):
            target, identifier, fn = \
                event_key.dispatch_target, event_key.identifier, event_key.fn

        def _listen(cls, target, identifier, fn, raw=False, propagate=False):
            if target.class_ in target.all_holds:
                collection = target.all_holds[target.class_]
            else:
                collection = target.all_holds[target.class_] = {}
                collection = target.all_holds[target.class_] = []

            event.registry._stored_in_collection(event_key, target)
            collection[event_key._key] = (event_key, raw, propagate)
            collection.append((identifier, fn, raw, propagate))

            if propagate:
                stack = list(target.class_.__subclasses__())

@@ -366,40 +346,31 @@ class _EventsHold(event.RefCollection):
                    stack.extend(subclass.__subclasses__())
                    subject = target.resolve(subclass)
                    if subject is not None:
                        # we are already going through __subclasses__()
                        # so leave generic propagate flag False
                        event_key.with_dispatch_target(subject).\
                            listen(raw=raw, propagate=False, **kw)

    def remove(self, event_key):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn

        if isinstance(target, _EventsHold):
            collection = target.all_holds[target.class_]
            del collection[event_key._key]
                        subject.dispatch._listen(subject, identifier, fn,
                                                 raw=raw, propagate=propagate)

    @classmethod
    def populate(cls, class_, subject):
        for subclass in class_.__mro__:
            if subclass in cls.all_holds:
                collection = cls.all_holds[subclass]
                for event_key, raw, propagate in collection.values():
                for ident, fn, raw, propagate in collection:
                    if propagate or subclass is class_:
                        # since we can't be sure in what order different classes
                        # in a hierarchy are triggered with populate(),
                        # we rely upon _EventsHold for all event
                        # assignment, instead of using the generic propagate
                        # flag.
                        event_key.with_dispatch_target(subject).\
                            listen(raw=raw, propagate=False)
                        subject.dispatch._listen(subject, ident,
                                                 fn, raw=raw,
                                                 propagate=False)


class _InstanceEventsHold(_EventsHold):
    all_holds = weakref.WeakKeyDictionary()

    def resolve(self, class_):
        return instrumentation.manager_of_class(class_)
        return orm.instrumentation.manager_of_class(class_)

    class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents):
        pass
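
[Editor's note] The ``raw`` handling in ``_listen`` above is what lets user
callbacks receive the mapped object rather than its ``InstanceState``. A
sketch of a ``'load'`` listener that behaves the same under either branch,
assuming the hypothetical mapped ``User`` class from the earlier notes::

    from sqlalchemy import event


    @event.listens_for(User, 'load')
    def on_load(instance, context):
        # 'instance' is the User object itself: raw defaults to False,
        # so the wrapper substitutes state.obj() before calling us.
        print("loaded %r" % (instance,))
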
@@ -478,23 +449,15 @@ class MapperEvents(event.Events):

    """

    _target_class_doc = "SomeClass"
    _dispatch_target = mapperlib.Mapper

    @classmethod
    def _new_mapper_instance(cls, class_, mapper):
        _MapperEventsHold.populate(class_, mapper)

    @classmethod
    @util.dependencies("sqlalchemy.orm")
    def _accept_with(cls, orm, target):
    def _accept_with(cls, target):
        if target is orm.mapper:
            return mapperlib.Mapper
            return orm.Mapper
        elif isinstance(target, type):
            if issubclass(target, mapperlib.Mapper):
            if issubclass(target, orm.Mapper):
                return target
            else:
                mapper = _mapper_or_none(target)
                mapper = orm.util._mapper_or_none(target)
                if mapper is not None:
                    return mapper
                else:

@@ -503,16 +466,8 @@ class MapperEvents(event.Events):
            return target

    @classmethod
    def _listen(cls, event_key, raw=False, retval=False, propagate=False, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn

        if identifier in ("before_configured", "after_configured") and \
                target is not mapperlib.Mapper:
            util.warn(
                "'before_configured' and 'after_configured' ORM events "
                "only invoke with the mapper() function or Mapper class "
                "as the target.")
    def _listen(cls, target, identifier, fn,
                raw=False, retval=False, propagate=False):

        if not raw or not retval:
            if not raw:

@@ -523,23 +478,24 @@ class MapperEvents(event.Events):
            except ValueError:
                target_index = None

            wrapped_fn = fn

            def wrap(*arg, **kw):
                if not raw and target_index is not None:
                    arg = list(arg)
                    arg[target_index] = arg[target_index].obj()
                if not retval:
                    fn(*arg, **kw)
                    return interfaces.EXT_CONTINUE
                    wrapped_fn(*arg, **kw)
                    return orm.interfaces.EXT_CONTINUE
                else:
                    return fn(*arg, **kw)
            event_key = event_key.with_wrapper(wrap)
                    return wrapped_fn(*arg, **kw)
            fn = wrap

        if propagate:
            for mapper in target.self_and_descendants:
                event_key.with_dispatch_target(mapper).base_listen(
                    propagate=True, **kw)
                event.Events._listen(mapper, identifier, fn, propagate=True)
        else:
            event_key.base_listen(**kw)
            event.Events._listen(target, identifier, fn)

    @classmethod
    def _clear(cls):
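
[Editor's note] When a mapper-level listener is registered without
``retval=True``, the wrapper above discards the callback's return value and
substitutes ``EXT_CONTINUE``, so listeners normally just mutate the target in
place. A hedged sketch, assuming the hypothetical ``User`` class additionally
has a ``name`` column::

    from sqlalchemy import event


    @event.listens_for(User, 'before_insert')
    def stamp_before_insert(mapper, connection, target):
        # return value is ignored (the wrapper returns EXT_CONTINUE)
        target.name = (target.name or '').strip()
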
@@ -590,42 +546,6 @@ class MapperEvents(event.Events):
        """
        # TODO: need coverage for this event

    def before_configured(self):
        """Called before a series of mappers have been configured.

        This corresponds to the :func:`.orm.configure_mappers` call, which
        note is usually called automatically as mappings are first
        used.

        This event can **only** be applied to the :class:`.Mapper` class
        or :func:`.mapper` function, and not to individual mappings or
        mapped classes.  It is only invoked for all mappings as a whole::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "before_configured")
            def go():
                # ...

        Theoretically this event is called once per
        application, but is actually called any time new mappers
        are to be affected by a :func:`.orm.configure_mappers`
        call.   If new mappings are constructed after existing ones have
        already been used, this event can be called again.  To ensure
        that a particular event is only called once and no further, the
        ``once=True`` argument (new in 0.9.4) can be applied::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "before_configured", once=True)
            def go():
                # ...


        .. versionadded:: 0.9.3

        """

    def after_configured(self):
        """Called after a series of mappers have been configured.

@@ -633,29 +553,11 @@ class MapperEvents(event.Events):
        note is usually called automatically as mappings are first
        used.

        This event can **only** be applied to the :class:`.Mapper` class
        or :func:`.mapper` function, and not to individual mappings or
        mapped classes.  It is only invoked for all mappings as a whole::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "after_configured")
            def go():
                # ...

        Theoretically this event is called once per
        application, but is actually called any time new mappers
        have been affected by a :func:`.orm.configure_mappers`
        call.   If new mappings are constructed after existing ones have
        already been used, this event can be called again.  To ensure
        that a particular event is only called once and no further, the
        ``once=True`` argument (new in 0.9.4) can be applied::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "after_configured", once=True)
            def go():
                # ...
        already been used, this event can be called again.

        """
@@ -1145,11 +1047,17 @@ class MapperEvents(event.Events):

        """

    @classmethod
    def _remove(cls, identifier, target, fn):
        "Removal of mapper events not yet implemented"
        raise NotImplementedError(msg)


class _MapperEventsHold(_EventsHold):
    all_holds = weakref.WeakKeyDictionary()

    def resolve(self, class_):
        return _mapper_or_none(class_)
        return orm.util._mapper_or_none(class_)

    class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents):
        pass

@@ -1174,45 +1082,45 @@ class SessionEvents(event.Events):

    The :func:`~.event.listen` function will accept
    :class:`.Session` objects as well as the return result
    of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
    of :func:`.sessionmaker` and :func:`.scoped_session`.

    Additionally, it accepts the :class:`.Session` class which
    will apply listeners to all :class:`.Session` instances
    globally.

    """

    _target_class_doc = "SomeSessionOrFactory"

    _dispatch_target = Session

    @classmethod
    def _accept_with(cls, target):
        if isinstance(target, scoped_session):
        if isinstance(target, orm.scoped_session):

            target = target.session_factory
            if not isinstance(target, sessionmaker) and \
            if not isinstance(target, orm.sessionmaker) and \
                (
                    not isinstance(target, type) or
                    not issubclass(target, Session)
                    not issubclass(target, orm.Session)
                ):
                raise exc.ArgumentError(
                    "Session event listen on a scoped_session "
                    "requires that its creation callable "
                    "is associated with the Session class.")

        if isinstance(target, sessionmaker):
        if isinstance(target, orm.sessionmaker):
            return target.class_
        elif isinstance(target, type):
            if issubclass(target, scoped_session):
                return Session
            elif issubclass(target, Session):
            if issubclass(target, orm.scoped_session):
                return orm.Session
            elif issubclass(target, orm.Session):
                return target
        elif isinstance(target, Session):
        elif isinstance(target, orm.Session):
            return target
        else:
            return None

    @classmethod
    def _remove(cls, identifier, target, fn):
        msg = "Removal of session events not yet implemented"
        raise NotImplementedError(msg)

    def after_transaction_create(self, session, transaction):
        """Execute when a new :class:`.SessionTransaction` is created.
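
[Editor's note] ``_accept_with`` above normalizes every accepted target down
to a ``Session`` subclass, which is why one listener declaration covers all
sessions produced by a factory. A hedged sketch, assuming a configured
``engine``::

    from sqlalchemy import event
    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker(bind=engine)


    @event.listens_for(Session, 'before_commit')
    def on_before_commit(session):
        print("about to commit %r" % (session,))
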
@ -1261,7 +1169,7 @@ class SessionEvents(event.Events):
|
||||
|
||||
.. note::
|
||||
|
||||
The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
|
||||
The :meth:`.before_commit` hook is *not* per-flush,
|
||||
that is, the :class:`.Session` can emit SQL to the database
|
||||
many times within the scope of a transaction.
|
||||
For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
|
||||
@ -1353,9 +1261,9 @@ class SessionEvents(event.Events):
|
||||
|
||||
:param session: The target :class:`.Session`.
|
||||
:param previous_transaction: The :class:`.SessionTransaction`
|
||||
transactional marker object which was just closed. The current
|
||||
:class:`.SessionTransaction` for the given :class:`.Session` is
|
||||
available via the :attr:`.Session.transaction` attribute.
|
||||
transactional marker object which was just closed. The current
|
||||
:class:`.SessionTransaction` for the given :class:`.Session` is
|
||||
available via the :attr:`.Session.transaction` attribute.
|
||||
|
||||
.. versionadded:: 0.7.3
|
||||
|
||||
@ -1447,7 +1355,7 @@ class SessionEvents(event.Events):
|
||||
This is called before an add, delete or merge causes
|
||||
the object to be part of the session.
|
||||
|
||||
.. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
|
||||
.. versionadded:: 0.8. Note that :meth:`.after_attach` now
|
||||
fires off after the item is part of the session.
|
||||
:meth:`.before_attach` is provided for those cases where
|
||||
the item should not yet be part of the session state.
|
||||
@ -1480,55 +1388,31 @@ class SessionEvents(event.Events):
|
||||
|
||||
"""
|
||||
|
||||
@event._legacy_signature("0.9",
|
||||
["session", "query", "query_context", "result"],
|
||||
lambda update_context: (
|
||||
update_context.session,
|
||||
update_context.query,
|
||||
update_context.context,
|
||||
update_context.result))
|
||||
def after_bulk_update(self, update_context):
|
||||
def after_bulk_update(self, session, query, query_context, result):
|
||||
"""Execute after a bulk update operation to the session.
|
||||
|
||||
This is called as a result of the :meth:`.Query.update` method.
|
||||
|
||||
:param update_context: an "update context" object which contains
|
||||
details about the update, including these attributes:
|
||||
|
||||
* ``session`` - the :class:`.Session` involved
|
||||
* ``query`` -the :class:`.Query` object that this update operation was
|
||||
called upon.
|
||||
* ``context`` The :class:`.QueryContext` object, corresponding
|
||||
to the invocation of an ORM query.
|
||||
* ``result`` the :class:`.ResultProxy` returned as a result of the
|
||||
bulk UPDATE operation.
|
||||
|
||||
:param query: the :class:`.Query` object that this update operation was
|
||||
called upon.
|
||||
:param query_context: The :class:`.QueryContext` object, corresponding
|
||||
to the invocation of an ORM query.
|
||||
:param result: the :class:`.ResultProxy` returned as a result of the
|
||||
bulk UPDATE operation.
|
||||
|
||||
"""
|
||||
|
||||
@event._legacy_signature("0.9",
|
||||
["session", "query", "query_context", "result"],
|
||||
lambda delete_context: (
|
||||
delete_context.session,
|
||||
delete_context.query,
|
||||
delete_context.context,
|
||||
delete_context.result))
|
||||
def after_bulk_delete(self, delete_context):
|
||||
def after_bulk_delete(self, session, query, query_context, result):
|
||||
"""Execute after a bulk delete operation to the session.
|
||||
|
||||
This is called as a result of the :meth:`.Query.delete` method.
|
||||
|
||||
:param delete_context: a "delete context" object which contains
|
||||
details about the update, including these attributes:
|
||||
|
||||
* ``session`` - the :class:`.Session` involved
|
||||
* ``query`` -the :class:`.Query` object that this update operation was
|
||||
called upon.
|
||||
* ``context`` The :class:`.QueryContext` object, corresponding
|
||||
to the invocation of an ORM query.
|
||||
* ``result`` the :class:`.ResultProxy` returned as a result of the
|
||||
bulk DELETE operation.
|
||||
|
||||
:param query: the :class:`.Query` object that this update operation was
|
||||
called upon.
|
||||
:param query_context: The :class:`.QueryContext` object, corresponding
|
||||
to the invocation of an ORM query.
|
||||
:param result: the :class:`.ResultProxy` returned as a result of the
|
||||
bulk DELETE operation.
|
||||
|
||||
"""

@@ -1562,7 +1446,7 @@ class AttributeEvents(event.Events):
        listen(UserContact.phone, 'set', validate_phone, retval=True)

    A validation function like the above can also raise an exception
    such as :exc:`ValueError` to halt the operation.
    such as :class:`.ValueError` to halt the operation.

    Several modifiers are available to the :func:`~.event.listen` function.
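
A minimal sketch of such a validator registered with ``retval=True``
(``UserContact`` is the mapped class assumed by the docstring example above)::

    import re

    from sqlalchemy import event

    def validate_phone(target, value, oldvalue, initiator):
        # Normalize the incoming value; raising ValueError halts the set.
        digits = re.sub(r'\D', '', value)
        if len(digits) < 7:
            raise ValueError("phone number too short: %r" % value)
        return digits

    event.listen(UserContact.phone, 'set', validate_phone, retval=True)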

@@ -1590,51 +1474,49 @@ class AttributeEvents(event.Events):

    """

    _target_class_doc = "SomeClass.some_attribute"
    _dispatch_target = QueryableAttribute

    @staticmethod
    def _set_dispatch(cls, dispatch_cls):
        event.Events._set_dispatch(cls, dispatch_cls)
        dispatch_cls._active_history = False

    @classmethod
    def _accept_with(cls, target):
        # TODO: coverage
        if isinstance(target, interfaces.MapperProperty):
        if isinstance(target, orm.interfaces.MapperProperty):
            return getattr(target.parent.class_, target.key)
        else:
            return target

    @classmethod
    def _listen(cls, event_key, active_history=False,
    def _listen(cls, target, identifier, fn, active_history=False,
                raw=False, retval=False,
                propagate=False):

        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn

        if active_history:
            target.dispatch._active_history = True

        # TODO: for removal, need to package the identity
        # of the wrapper with the original function.

        if not raw or not retval:
            orig_fn = fn

            def wrap(target, value, *arg):
                if not raw:
                    target = target.obj()
                if not retval:
                    fn(target, value, *arg)
                    orig_fn(target, value, *arg)
                    return value
                else:
                    return fn(target, value, *arg)
            event_key = event_key.with_wrapper(wrap)
                    return orig_fn(target, value, *arg)
            fn = wrap

        event_key.base_listen(propagate=propagate)
        event.Events._listen(target, identifier, fn, propagate)

        if propagate:
            manager = instrumentation.manager_of_class(target.class_)
            manager = orm.instrumentation.manager_of_class(target.class_)

            for mgr in manager.subclass_managers(True):
                event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
                event.Events._listen(mgr[target.key], identifier, fn, True)

    @classmethod
    def _remove(cls, identifier, target, fn):
        msg = "Removal of attribute events not yet implemented"
        raise NotImplementedError(msg)

    def append(self, target, value, initiator):
        """Receive a collection append event.

@@ -1646,15 +1528,8 @@ class AttributeEvents(event.Events):
            is registered with ``retval=True``, the listener
            function must return this value, or a new value which
            replaces it.
        :param initiator: An instance of :class:`.attributes.Event`
            representing the initiation of the event.  May be modified
            from its original value by backref handlers in order to control
            chained event propagation.

            .. versionchanged:: 0.9.0 the ``initiator`` argument is now
                passed as a :class:`.attributes.Event` object, and may be modified
                by backref handlers within a chain of backref-linked events.

        :param initiator: the attribute implementation object
            which initiated this event.
        :return: if the event was registered with ``retval=True``,
            the given value, or a new effective value, should be returned.

@@ -1667,15 +1542,8 @@ class AttributeEvents(event.Events):
            If the listener is registered with ``raw=True``, this will
            be the :class:`.InstanceState` object.
        :param value: the value being removed.
        :param initiator: An instance of :class:`.attributes.Event`
            representing the initiation of the event.  May be modified
            from its original value by backref handlers in order to control
            chained event propagation.

            .. versionchanged:: 0.9.0 the ``initiator`` argument is now
                passed as a :class:`.attributes.Event` object, and may be modified
                by backref handlers within a chain of backref-linked events.

        :param initiator: the attribute implementation object
            which initiated this event.
        :return: No return value is defined for this event.
        """

@@ -1695,17 +1563,9 @@ class AttributeEvents(event.Events):
            the previous value of the attribute will be loaded from
            the database if the existing value is currently unloaded
            or expired.
        :param initiator: An instance of :class:`.attributes.Event`
            representing the initiation of the event.  May be modified
            from its original value by backref handlers in order to control
            chained event propagation.

            .. versionchanged:: 0.9.0 the ``initiator`` argument is now
                passed as a :class:`.attributes.Event` object, and may be modified
                by backref handlers within a chain of backref-linked events.

        :param initiator: the attribute implementation object
            which initiated this event.
        :return: if the event was registered with ``retval=True``,
            the given value, or a new effective value, should be returned.

        """

@@ -6,6 +6,8 @@

"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc, util
orm_util = util.importlater('sqlalchemy.orm', 'util')
attributes = util.importlater('sqlalchemy.orm', 'attributes')

NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
@@ -63,11 +65,10 @@ class DetachedInstanceError(sa_exc.SQLAlchemyError):
class UnmappedInstanceError(UnmappedError):
    """A mapping operation was requested for an unknown instance."""

    @util.dependencies("sqlalchemy.orm.base")
    def __init__(self, base, obj, msg=None):
    def __init__(self, obj, msg=None):
        if not msg:
            try:
                base.class_mapper(type(obj))
                mapper = orm_util.class_mapper(type(obj))
                name = _safe_cls_name(type(obj))
                msg = ("Class %r is mapped, but this instance lacks "
                       "instrumentation.  This occurs when the instance"
@@ -116,11 +117,10 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
    object.

    """
    @util.dependencies("sqlalchemy.orm.base")
    def __init__(self, base, state, msg=None):
    def __init__(self, state, msg=None):
        if not msg:
            msg = "Instance '%s' has been deleted, or its "\
                "row is otherwise not present." % base.state_str(state)
                "row is otherwise not present." % orm_util.state_str(state)

        sa_exc.InvalidRequestError.__init__(self, msg)

@@ -149,10 +149,10 @@ def _safe_cls_name(cls):
        cls_name = repr(cls)
    return cls_name

@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):

def _default_unmapped(cls):
    try:
        mappers = base.manager_of_class(cls).mappers
        mappers = attributes.manager_of_class(cls).mappers
    except NO_STATE:
        mappers = {}
    except TypeError:

@@ -6,7 +6,7 @@

import weakref
from . import attributes
from .. import util


class IdentityMap(dict):
    def __init__(self):
@@ -75,7 +75,7 @@ class WeakInstanceDict(IdentityMap):
        state = dict.__getitem__(self, key)
        o = state.obj()
        if o is None:
            raise KeyError(key)
            raise KeyError, key
        return o

    def __contains__(self, key):
@@ -152,27 +152,30 @@ class WeakInstanceDict(IdentityMap):

        return result

    if util.py2k:
        items = _items
        values = _values
    # Py3K
    #def items(self):
    #    return iter(self._items())
    #
    #def values(self):
    #    return iter(self._values())
    # Py2K
    items = _items

        def iteritems(self):
            return iter(self.items())
    def iteritems(self):
        return iter(self.items())

        def itervalues(self):
            return iter(self.values())
    else:
        def items(self):
            return iter(self._items())
    values = _values

        def values(self):
            return iter(self._values())
    def itervalues(self):
        return iter(self.values())
    # end Py2K

    def all_states(self):
        if util.py2k:
            return dict.values(self)
        else:
            return list(dict.values(self))
        # Py3K
        # return list(dict.values(self))
        # Py2K
        return dict.values(self)
        # end Py2K
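
The two spellings above reflect the different Python 2/3 strategies of the two
versions: 0.9 branches at runtime on a ``util.py2k`` flag, while 0.8 relies on
``# Py2K`` / ``# end Py2K`` markers that its build tooling rewrites at install
time.  A rough sketch of the runtime-flag idiom, simplified from the real
helper::

    import sys

    py2k = sys.version_info < (3, 0)

    class VersionedDict(dict):
        def _items(self):
            return list(dict.items(self))

        if py2k:
            # Python 2: items() returns a list; iteritems() iterates.
            items = _items

            def iteritems(self):
                return iter(self._items())
        else:
            # Python 3: items() is expected to return an iterator/view.
            def items(self):
                return iter(self._items())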

    def discard(self, state):
        st = dict.get(self, state.key, None)
@@ -186,7 +189,7 @@ class WeakInstanceDict(IdentityMap):

class StrongInstanceDict(IdentityMap):
    def all_states(self):
        return [attributes.instance_state(o) for o in self.values()]
        return [attributes.instance_state(o) for o in self.itervalues()]

    def contains_state(self, state):
        return (

@@ -29,15 +29,17 @@ alternate instrumentation forms.
"""


from . import exc, collections, interfaces, state
from .. import util
from . import base
from . import exc, collections, events, interfaces
from operator import attrgetter
from .. import event, util
state = util.importlater("sqlalchemy.orm", "state")


class ClassManager(dict):
    """tracks state information at the class level."""

    MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
    STATE_ATTR = base.DEFAULT_STATE_ATTR
    MANAGER_ATTR = '_sa_class_manager'
    STATE_ATTR = '_sa_instance_state'

    deferred_scalar_loader = None

@@ -61,8 +63,7 @@ class ClassManager(dict):
        for base in self._bases:
            self.update(base)

        self.dispatch._events._new_classmanager_instance(class_, self)
        #events._InstanceEventsHold.populate(class_, self)
        events._InstanceEventsHold.populate(class_, self)

        for basecls in class_.__mro__:
            mgr = manager_of_class(basecls)
@@ -78,11 +79,7 @@ class ClassManager(dict):
                "reference cycles.  Please remove this method." %
                class_)

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        return other is self
    dispatch = event.dispatcher(events.InstanceEvents)

    @property
    def is_mapped(self):
@@ -167,7 +164,9 @@ class ClassManager(dict):

    @util.hybridmethod
    def manager_getter(self):
        return _default_manager_getter
        def manager_of_class(cls):
            return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
        return manager_of_class

    @util.hybridmethod
    def state_getter(self):
@@ -178,12 +177,11 @@ class ClassManager(dict):
        instance.
        """

        return _default_state_getter
        return attrgetter(self.STATE_ATTR)

    @util.hybridmethod
    def dict_getter(self):
        return _default_dict_getter

        return attrgetter('__dict__')

    def instrument_attribute(self, key, inst, propagated=False):
        if propagated:
@@ -281,7 +279,7 @@ class ClassManager(dict):

    @property
    def attributes(self):
        return iter(self.values())
        return self.itervalues()

    ## InstanceState management

@@ -298,9 +296,6 @@ class ClassManager(dict):
    def teardown_instance(self, instance):
        delattr(instance, self.STATE_ATTR)

    def _serialize(self, state, state_dict):
        return _SerializeManager(state, state_dict)

    def _new_state_if_none(self, instance):
        """Install a default InstanceState if none is present.

@@ -330,51 +325,20 @@ class ClassManager(dict):
        """TODO"""
        return self.get_impl(key).hasparent(state, optimistic=optimistic)

    def __bool__(self):
    def __nonzero__(self):
        """All ClassManagers are non-zero regardless of attribute state."""
        return True

    __nonzero__ = __bool__

    def __repr__(self):
        return '<%s of %r at %x>' % (
            self.__class__.__name__, self.class_, id(self))

class _SerializeManager(object):
    """Provide serialization of a :class:`.ClassManager`.

    The :class:`.InstanceState` uses ``__init__()`` on serialize
    and ``__call__()`` on deserialize.

    """
    def __init__(self, state, d):
        self.class_ = state.class_
        manager = state.manager
        manager.dispatch.pickle(state, d)

    def __call__(self, state, inst, state_dict):
        state.manager = manager = manager_of_class(self.class_)
        if manager is None:
            raise exc.UnmappedInstanceError(
                inst,
                "Cannot deserialize object of type %r - "
                "no mapper() has "
                "been configured for this class within the current "
                "Python process!" %
                self.class_)
        elif manager.is_mapped and not manager.mapper.configured:
            manager.mapper._configure_all()

        # setup _sa_instance_state ahead of time so that
        # unpickle events can access the object normally.
        # see [ticket:2362]
        if inst is not None:
            manager.setup_instance(inst, state)
        manager.dispatch.unpickle(state, state_dict)

class InstrumentationFactory(object):
    """Factory for new ClassManager instances."""

    dispatch = event.dispatcher(events.InstrumentationEvents)

    def create_manager_for_cls(self, class_):
        assert class_ is not None
        assert manager_of_class(class_) is None
@@ -414,14 +378,6 @@ class InstrumentationFactory(object):
# when imported.
_instrumentation_factory = InstrumentationFactory()

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state

instance_dict = _default_dict_getter = base.instance_dict

manager_of_class = _default_manager_getter = base.manager_of_class

def register_class(class_):
    """Register class instrumentation.

@@ -453,6 +409,15 @@ def is_instrumented(instance, key):
    return manager_of_class(instance.__class__).\
        is_instrumented(key, search=True)

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = ClassManager.state_getter()

instance_dict = _default_dict_getter = ClassManager.dict_getter()

manager_of_class = _default_manager_getter = ClassManager.manager_getter()


def _generate_init(class_, class_manager):
    """Build an __init__ decorator that triggers ClassManager events."""
@@ -479,21 +444,21 @@ def __init__(%(apply_pos)s):
    func_vars = util.format_argspec_init(original__init__, grouped=False)
    func_text = func_body % func_vars

    if util.py2k:
        func = getattr(original__init__, 'im_func', original__init__)
        func_defaults = getattr(func, 'func_defaults', None)
    else:
        func_defaults = getattr(original__init__, '__defaults__', None)
        func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
    # Py3K
    #func_defaults = getattr(original__init__, '__defaults__', None)
    #func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
    # Py2K
    func = getattr(original__init__, 'im_func', original__init__)
    func_defaults = getattr(func, 'func_defaults', None)
    # end Py2K

    env = locals().copy()
    exec(func_text, env)
    exec func_text in env
    __init__ = env['__init__']
    __init__.__doc__ = original__init__.__doc__

    if func_defaults:
        __init__.__defaults__ = func_defaults
    if not util.py2k and func_kw_defaults:
        __init__.__kwdefaults__ = func_kw_defaults

    __init__.func_defaults = func_defaults
    # Py3K
    #if func_kw_defaults:
    #    __init__.__kwdefaults__ = func_kw_defaults
    return __init__
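
The two ``exec`` spellings above differ because Python 2 treats ``exec`` as a
statement while Python 3 makes it a built-in function.  A small illustration
with a hypothetical generated function (not taken from the diff)::

    namespace = {}
    func_text = "def greet(name):\n    return 'hello ' + name\n"

    exec(func_text, namespace)        # Python 3 form (also accepted by 2.6+)
    # exec func_text in namespace     # Python 2-only statement form

    print(namespace['greet']('world'))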

@@ -15,17 +15,14 @@ Other than the deprecated extensions, this module and the
classes within should be considered mostly private.

"""

from __future__ import absolute_import

from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
from .base import _InspectionAttr, _MappedAttribute
from .path_registry import PathRegistry
import collections

orm_util = util.importlater('sqlalchemy.orm', 'util')
collections = util.importlater('sqlalchemy.orm', 'collections')
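
``util.importlater`` defers the named module's import until the first
attribute access, which is how these mutually-dependent ORM modules avoid
circular imports at definition time.  A simplified stand-in for the idea (the
real helper carries extra bookkeeping)::

    import importlib

    class ImportLater(object):
        # Defer importing ``package.name`` until an attribute is used.
        def __init__(self, package, name):
            self._path = "%s.%s" % (package, name)
            self._module = None

        def __getattr__(self, key):
            if self._module is None:
                self._module = importlib.import_module(self._path)
            return getattr(self._module, key)

    # reference sqlalchemy.orm.util without importing it eagerly:
    orm_util = ImportLater('sqlalchemy.orm', 'util')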

__all__ = (
    'AttributeExtension',
@@ -44,6 +41,97 @@ __all__ = (
    'StrategizedProperty',
    )

EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')

ONETOMANY = util.symbol('ONETOMANY')
MANYTOONE = util.symbol('MANYTOONE')
MANYTOMANY = util.symbol('MANYTOMANY')

from .deprecated_interfaces import AttributeExtension, \
    SessionExtension, \
    MapperExtension


NOT_EXTENSION = util.symbol('NOT_EXTENSION')
"""Symbol indicating an :class:`_InspectionAttr` that's
not part of sqlalchemy.ext.

Is assigned to the :attr:`._InspectionAttr.extension_type`
attribute.

"""

class _InspectionAttr(object):
    """A base class applied to all ORM objects that can be returned
    by the :func:`.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.

    """

    is_selectable = False
    """Return True if this object is an instance of :class:`.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`.Mapper`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types.  Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`.  But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`._InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`.Mapper.all_orm_descriptors`

    """

    is_clause_element = False
    """True if this object is an instance of :class:`.ClauseElement`."""

    extension_type = NOT_EXTENSION
    """The extension type, if any.
    Defaults to :data:`.interfaces.NOT_EXTENSION`

    .. versionadded:: 0.8.0

    .. seealso::

        :data:`.HYBRID_METHOD`

        :data:`.HYBRID_PROPERTY`

        :data:`.ASSOCIATION_PROXY`

    """

class _MappedAttribute(object):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.

    """


class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -311,33 +399,21 @@ class PropComparator(operators.ColumnOperators):

    """

    def __init__(self, prop, parentmapper, adapt_to_entity=None):
    def __init__(self, prop, parentmapper, adapter=None):
        self.prop = self.property = prop
        self._parentmapper = parentmapper
        self._adapt_to_entity = adapt_to_entity
        self.adapter = adapter

    def __clause_element__(self):
        raise NotImplementedError("%r" % self)

    def _query_clause_element(self):
        return self.__clause_element__()

    def adapt_to_entity(self, adapt_to_entity):
    def adapted(self, adapter):
        """Return a copy of this PropComparator which will use the given
        :class:`.AliasedInsp` to produce corresponding expressions.
        """
        return self.__class__(self.prop, self._parentmapper, adapt_to_entity)

    @property
    def adapter(self):
        """Produce a callable that adapts column expressions
        to suit an aliased version of this comparator.
        adaption function on the local side of generated expressions.

        """
        if self._adapt_to_entity is None:
            return None
        else:
            return self._adapt_to_entity._adapt_element

        return self.__class__(self.prop, self._parentmapper, adapter)

    @util.memoized_property
    def info(self):
@@ -423,57 +499,51 @@ class StrategizedProperty(MapperProperty):

    strategy_wildcard_key = None

    def _get_context_loader(self, context, path):
        load = None
    @util.memoized_property
    def _wildcard_path(self):
        if self.strategy_wildcard_key:
            return ('loaderstrategy', (self.strategy_wildcard_key,))
        else:
            return None

        # use EntityRegistry.__getitem__()->PropRegistry here so
        # that the path is stated in terms of our base
        search_path = dict.__getitem__(path, self)
    def _get_context_strategy(self, context, path):
        strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')

        # search among: exact match, "attr.*", "default" strategy
        # if any.
        for path_key in (
            search_path._loader_key,
            search_path._wildcard_path_loader_key,
            search_path._default_path_loader_key
        ):
            if path_key in context.attributes:
                load = context.attributes[path_key]
                break
        if not strategy_cls:
            wc_key = self._wildcard_path
            if wc_key and wc_key in context.attributes:
                strategy_cls = context.attributes[wc_key]

        return load
        if strategy_cls:
            try:
                return self._strategies[strategy_cls]
            except KeyError:
                return self.__init_strategy(strategy_cls)
        return self.strategy

    def _get_strategy(self, key):
    def _get_strategy(self, cls):
        try:
            return self._strategies[key]
            return self._strategies[cls]
        except KeyError:
            cls = self._strategy_lookup(*key)
            self._strategies[key] = self._strategies[cls] = strategy = cls(self)
            return strategy
            return self.__init_strategy(cls)

    def _get_strategy_by_cls(self, cls):
        return self._get_strategy(cls._strategy_keys[0])
    def __init_strategy(self, cls):
        self._strategies[cls] = strategy = cls(self)
        return strategy

    def setup(self, context, entity, path, adapter, **kwargs):
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.setup_query(context, entity, path, loader, adapter, **kwargs)
        self._get_context_strategy(context, path).\
            setup_query(context, entity, path,
                        adapter, **kwargs)

    def create_row_processor(self, context, path, mapper, row, adapter):
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        return strat.create_row_processor(context, path, loader,
        return self._get_context_strategy(context, path).\
            create_row_processor(context, path,
                                 mapper, row, adapter)

    def do_init(self):
        self._strategies = {}
        self.strategy = self._get_strategy_by_cls(self.strategy_class)
        self.strategy = self.__init_strategy(self.strategy_class)

    def post_instrument_class(self, mapper):
        if self.is_primary() and \
@@ -481,30 +551,6 @@ class StrategizedProperty(MapperProperty):
            self.strategy.init_class_attribute(mapper)


    _strategies = collections.defaultdict(dict)

    @classmethod
    def strategy_for(cls, **kw):
        def decorate(dec_cls):
            dec_cls._strategy_keys = []
            key = tuple(sorted(kw.items()))
            cls._strategies[cls][key] = dec_cls
            dec_cls._strategy_keys.append(key)
            return dec_cls
        return decorate

    @classmethod
    def _strategy_lookup(cls, *key):
        for prop_cls in cls.__mro__:
            if prop_cls in cls._strategies:
                strategies = cls._strategies[prop_cls]
                try:
                    return strategies[key]
                except KeyError:
                    pass
        raise Exception("can't locate strategy for %s %s" % (cls, key))
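
The ``strategy_for`` / ``_strategy_lookup`` pair removed above is a small
decorator-driven registry keyed by sorted keyword tuples.  The pattern in
isolation, with generic names rather than SQLAlchemy's::

    import collections

    class Registry(object):
        _strategies = collections.defaultdict(dict)

        @classmethod
        def strategy_for(cls, **kw):
            # File the decorated class under a canonical key such as
            # (('lazy', 'select'),) so lookups are order-insensitive.
            def decorate(dec_cls):
                key = tuple(sorted(kw.items()))
                cls._strategies[cls][key] = dec_cls
                return dec_cls
            return decorate

    @Registry.strategy_for(lazy='select')
    class LazyLoader(object):
        pass

    assert Registry._strategies[Registry][(('lazy', 'select'),)] is LazyLoader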
|
||||
|
||||
|
||||
class MapperOption(object):
|
||||
"""Describe a modification to a Query."""
|
||||
|
||||
@ -526,6 +572,241 @@ class MapperOption(object):
|
||||
self.process_query(query)
|
||||
|
||||
|
||||
class PropertyOption(MapperOption):
|
||||
"""A MapperOption that is applied to a property off the mapper or
|
||||
one of its child mappers, identified by a dot-separated key
|
||||
or list of class-bound attributes. """
|
||||
|
||||
def __init__(self, key, mapper=None):
|
||||
self.key = key
|
||||
self.mapper = mapper
|
||||
|
||||
def process_query(self, query):
|
||||
self._process(query, True)
|
||||
|
||||
def process_query_conditionally(self, query):
|
||||
self._process(query, False)
|
||||
|
||||
def _process(self, query, raiseerr):
|
||||
paths = self._process_paths(query, raiseerr)
|
||||
if paths:
|
||||
self.process_query_property(query, paths)
|
||||
|
||||
def process_query_property(self, query, paths):
|
||||
pass
|
||||
|
||||
def __getstate__(self):
|
||||
d = self.__dict__.copy()
|
||||
d['key'] = ret = []
|
||||
for token in util.to_list(self.key):
|
||||
if isinstance(token, PropComparator):
|
||||
ret.append((token._parentmapper.class_, token.key))
|
||||
else:
|
||||
ret.append(token)
|
||||
return d
|
||||
|
||||
def __setstate__(self, state):
|
||||
ret = []
|
||||
for key in state['key']:
|
||||
if isinstance(key, tuple):
|
||||
cls, propkey = key
|
||||
ret.append(getattr(cls, propkey))
|
||||
else:
|
||||
ret.append(key)
|
||||
state['key'] = tuple(ret)
|
||||
self.__dict__ = state
|
||||
|
||||
def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
|
||||
if orm_util._is_aliased_class(mapper):
|
||||
searchfor = mapper
|
||||
else:
|
||||
searchfor = orm_util._class_to_mapper(mapper)
|
||||
for ent in query._mapper_entities:
|
||||
if ent.corresponds_to(searchfor):
|
||||
return ent
|
||||
else:
|
||||
if raiseerr:
|
||||
if not list(query._mapper_entities):
|
||||
raise sa_exc.ArgumentError(
|
||||
"Query has only expression-based entities - "
|
||||
"can't find property named '%s'."
|
||||
% (token, )
|
||||
)
|
||||
else:
|
||||
raise sa_exc.ArgumentError(
|
||||
"Can't find property '%s' on any entity "
|
||||
"specified in this Query. Note the full path "
|
||||
"from root (%s) to target entity must be specified."
|
||||
% (token, ",".join(str(x) for
|
||||
x in query._mapper_entities))
|
||||
)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _find_entity_basestring(self, query, token, raiseerr):
|
||||
for ent in query._mapper_entities:
|
||||
# return only the first _MapperEntity when searching
|
||||
# based on string prop name. Ideally object
|
||||
# attributes are used to specify more exactly.
|
||||
return ent
|
||||
else:
|
||||
if raiseerr:
|
||||
raise sa_exc.ArgumentError(
|
||||
"Query has only expression-based entities - "
|
||||
"can't find property named '%s'."
|
||||
% (token, )
|
||||
)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _process_paths(self, query, raiseerr):
|
||||
"""reconcile the 'key' for this PropertyOption with
|
||||
the current path and entities of the query.
|
||||
|
||||
Return a list of affected paths.
|
||||
|
||||
"""
|
||||
path = orm_util.PathRegistry.root
|
||||
entity = None
|
||||
paths = []
|
||||
no_result = []
|
||||
|
||||
# _current_path implies we're in a
|
||||
# secondary load with an existing path
|
||||
current_path = list(query._current_path.path)
|
||||
|
||||
tokens = deque(self.key)
|
||||
while tokens:
|
||||
token = tokens.popleft()
|
||||
if isinstance(token, basestring):
|
||||
# wildcard token
|
||||
if token.endswith(':*'):
|
||||
return [path.token(token)]
|
||||
sub_tokens = token.split(".", 1)
|
||||
token = sub_tokens[0]
|
||||
tokens.extendleft(sub_tokens[1:])
|
||||
|
||||
# exhaust current_path before
|
||||
# matching tokens to entities
|
||||
if current_path:
|
||||
if current_path[1].key == token:
|
||||
current_path = current_path[2:]
|
||||
continue
|
||||
else:
|
||||
return no_result
|
||||
|
||||
if not entity:
|
||||
entity = self._find_entity_basestring(
|
||||
query,
|
||||
token,
|
||||
raiseerr)
|
||||
if entity is None:
|
||||
return no_result
|
||||
path_element = entity.entity_zero
|
||||
mapper = entity.mapper
|
||||
|
||||
if hasattr(mapper.class_, token):
|
||||
prop = getattr(mapper.class_, token).property
|
||||
else:
|
||||
if raiseerr:
|
||||
raise sa_exc.ArgumentError(
|
||||
"Can't find property named '%s' on the "
|
||||
"mapped entity %s in this Query. " % (
|
||||
token, mapper)
|
||||
)
|
||||
else:
|
||||
return no_result
|
||||
elif isinstance(token, PropComparator):
|
||||
prop = token.property
|
||||
|
||||
# exhaust current_path before
|
||||
# matching tokens to entities
|
||||
if current_path:
|
||||
if current_path[0:2] == \
|
||||
[token._parententity, prop]:
|
||||
current_path = current_path[2:]
|
||||
continue
|
||||
else:
|
||||
return no_result
|
||||
|
||||
if not entity:
|
||||
entity = self._find_entity_prop_comparator(
|
||||
query,
|
||||
prop.key,
|
||||
token._parententity,
|
||||
raiseerr)
|
||||
if not entity:
|
||||
return no_result
|
||||
|
||||
path_element = entity.entity_zero
|
||||
mapper = entity.mapper
|
||||
else:
|
||||
raise sa_exc.ArgumentError(
|
||||
"mapper option expects "
|
||||
"string key or list of attributes")
|
||||
assert prop is not None
|
||||
if raiseerr and not prop.parent.common_parent(mapper):
|
||||
raise sa_exc.ArgumentError("Attribute '%s' does not "
|
||||
"link from element '%s'" % (token, path_element))
|
||||
|
||||
path = path[path_element][prop]
|
||||
|
||||
paths.append(path)
|
||||
|
||||
if getattr(token, '_of_type', None):
|
||||
ac = token._of_type
|
||||
ext_info = inspect(ac)
|
||||
path_element = mapper = ext_info.mapper
|
||||
if not ext_info.is_aliased_class:
|
||||
ac = orm_util.with_polymorphic(
|
||||
ext_info.mapper.base_mapper,
|
||||
ext_info.mapper, aliased=True,
|
||||
_use_mapper_path=True)
|
||||
ext_info = inspect(ac)
|
||||
path.set(query, "path_with_polymorphic", ext_info)
|
||||
else:
|
||||
path_element = mapper = getattr(prop, 'mapper', None)
|
||||
if mapper is None and tokens:
|
||||
raise sa_exc.ArgumentError(
|
||||
"Attribute '%s' of entity '%s' does not "
|
||||
"refer to a mapped entity" %
|
||||
(token, entity)
|
||||
)
|
||||
|
||||
if current_path:
|
||||
# ran out of tokens before
|
||||
# current_path was exhausted.
|
||||
assert not tokens
|
||||
return no_result
|
||||
|
||||
return paths
|
||||
|
||||
|
||||
class StrategizedOption(PropertyOption):
|
||||
"""A MapperOption that affects which LoaderStrategy will be used
|
||||
for an operation by a StrategizedProperty.
|
||||
"""
|
||||
|
||||
chained = False
|
||||
|
||||
def process_query_property(self, query, paths):
|
||||
strategy = self.get_strategy_class()
|
||||
if self.chained:
|
||||
for path in paths:
|
||||
path.set(
|
||||
query,
|
||||
"loaderstrategy",
|
||||
strategy
|
||||
)
|
||||
else:
|
||||
paths[-1].set(
|
||||
query,
|
||||
"loaderstrategy",
|
||||
strategy
|
||||
)
|
||||
|
||||
def get_strategy_class(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class LoaderStrategy(object):
|
||||
@ -560,10 +841,10 @@ class LoaderStrategy(object):
|
||||
def init_class_attribute(self, mapper):
|
||||
pass
|
||||
|
||||
def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
|
||||
def setup_query(self, context, entity, path, adapter, **kwargs):
|
||||
pass
|
||||
|
||||
def create_row_processor(self, context, path, loadopt, mapper,
|
||||
def create_row_processor(self, context, path, mapper,
|
||||
row, adapter):
|
||||
"""Return row processing functions which fulfill the contract
|
||||
specified by MapperProperty.create_row_processor.
|
||||
|
@ -11,7 +11,7 @@ the functions here are called primarily by Query, Mapper,
|
||||
as well as some of the attribute loading strategies.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import util
|
||||
from . import attributes, exc as orm_exc, state as statelib
|
||||
@ -19,6 +19,7 @@ from .interfaces import EXT_CONTINUE
|
||||
from ..sql import util as sql_util
|
||||
from .util import _none_set, state_str
|
||||
from .. import exc as sa_exc
|
||||
sessionlib = util.importlater("sqlalchemy.orm", "session")
|
||||
|
||||
_new_runid = util.counter()
|
||||
|
||||
@ -33,8 +34,7 @@ def instances(query, cursor, context):
|
||||
for ent in query._entities]
|
||||
filtered = id in filter_fns
|
||||
|
||||
single_entity = len(query._entities) == 1 and \
|
||||
query._entities[0].supports_single_entity
|
||||
single_entity = filtered and len(query._entities) == 1
|
||||
|
||||
if filtered:
|
||||
if single_entity:
|
||||
@ -44,14 +44,14 @@ def instances(query, cursor, context):
|
||||
return tuple(fn(x) for x, fn in zip(row, filter_fns))
|
||||
|
||||
custom_rows = single_entity and \
|
||||
query._entities[0].custom_rows
|
||||
query._entities[0].mapper.dispatch.append_result
|
||||
|
||||
(process, labels) = \
|
||||
list(zip(*[
|
||||
zip(*[
|
||||
query_entity.row_processor(query,
|
||||
context, custom_rows)
|
||||
for query_entity in query._entities
|
||||
]))
|
||||
])
|
||||
|
||||
while True:
|
||||
context.progress = {}
|
||||
@ -84,11 +84,11 @@ def instances(query, cursor, context):
|
||||
context.progress.pop(context.refresh_state)
|
||||
|
||||
statelib.InstanceState._commit_all_states(
|
||||
list(context.progress.items()),
|
||||
context.progress.items(),
|
||||
session.identity_map
|
||||
)
|
||||
|
||||
for state, (dict_, attrs) in context.partials.items():
|
||||
for state, (dict_, attrs) in context.partials.iteritems():
|
||||
state._commit(dict_, attrs)
|
||||
|
||||
for row in rows:
|
||||
@ -98,10 +98,11 @@ def instances(query, cursor, context):
|
||||
break
|
||||
|
||||
|
||||
@util.dependencies("sqlalchemy.orm.query")
|
||||
def merge_result(querylib, query, iterator, load=True):
|
||||
def merge_result(query, iterator, load=True):
|
||||
"""Merge a result into this :class:`.Query` object's Session."""
|
||||
|
||||
from . import query as querylib
|
||||
|
||||
session = query.session
|
||||
if load:
|
||||
# flush current contents if we expect to load data
|
||||
@ -174,6 +175,8 @@ def load_on_ident(query, key,
|
||||
only_load_props=None):
|
||||
"""Load the given identity key from the database."""
|
||||
|
||||
lockmode = lockmode or query._lockmode
|
||||
|
||||
if key is not None:
|
||||
ident = key[1]
|
||||
else:
|
||||
@ -211,17 +214,10 @@ def load_on_ident(query, key,
|
||||
q._params = params
|
||||
|
||||
if lockmode is not None:
|
||||
version_check = True
|
||||
q = q.with_lockmode(lockmode)
|
||||
elif query._for_update_arg is not None:
|
||||
version_check = True
|
||||
q._for_update_arg = query._for_update_arg
|
||||
else:
|
||||
version_check = False
|
||||
|
||||
q._lockmode = lockmode
|
||||
q._get_options(
|
||||
populate_existing=bool(refresh_state),
|
||||
version_check=version_check,
|
||||
version_check=(lockmode is not None),
|
||||
only_load_props=only_load_props,
|
||||
refresh_state=refresh_state)
|
||||
q._order_by = None
|
||||
@ -361,7 +357,6 @@ def instance_processor(mapper, context, path, adapter,
|
||||
)
|
||||
|
||||
instance = session_identity_map.get(identitykey)
|
||||
|
||||
if instance is not None:
|
||||
state = attributes.instance_state(instance)
|
||||
dict_ = attributes.instance_dict(instance)
|
||||
@ -512,7 +507,7 @@ def _populators(mapper, context, path, row, adapter,
|
||||
pops = (new_populators, existing_populators, delayed_populators,
|
||||
eager_populators)
|
||||
|
||||
for prop in mapper._props.values():
|
||||
for prop in mapper._props.itervalues():
|
||||
|
||||
for i, pop in enumerate(prop.create_row_processor(
|
||||
context,
|
||||
@ -552,7 +547,7 @@ def load_scalar_attributes(mapper, state, attribute_names):
|
||||
"""initiate a column-based attribute refresh operation."""
|
||||
|
||||
#assert mapper is _state_mapper(state)
|
||||
session = state.session
|
||||
session = sessionlib._state_session(state)
|
||||
if not session:
|
||||
raise orm_exc.DetachedInstanceError(
|
||||
"Instance %s is not bound to a Session; "
|
||||
|
@ -14,7 +14,6 @@ available in :class:`~sqlalchemy.orm.`.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import types
|
||||
import weakref
|
||||
from itertools import chain
|
||||
@ -22,18 +21,26 @@ from collections import deque
|
||||
|
||||
from .. import sql, util, log, exc as sa_exc, event, schema, inspection
|
||||
from ..sql import expression, visitors, operators, util as sql_util
|
||||
from . import instrumentation, attributes, exc as orm_exc, loading
|
||||
from . import properties
|
||||
from . import instrumentation, attributes, \
|
||||
exc as orm_exc, events, loading
|
||||
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
|
||||
|
||||
from .base import _class_to_mapper, _state_mapper, class_mapper, \
|
||||
state_str, _INSTRUMENTOR
|
||||
from .path_registry import PathRegistry
|
||||
|
||||
from .util import _INSTRUMENTOR, _class_to_mapper, \
|
||||
_state_mapper, class_mapper, \
|
||||
PathRegistry, state_str
|
||||
import sys
|
||||
properties = util.importlater("sqlalchemy.orm", "properties")
|
||||
descriptor_props = util.importlater("sqlalchemy.orm", "descriptor_props")
|
||||
|
||||
__all__ = (
|
||||
'Mapper',
|
||||
'_mapper_registry',
|
||||
'class_mapper',
|
||||
'object_mapper',
|
||||
)
|
||||
|
||||
_mapper_registry = weakref.WeakKeyDictionary()
|
||||
_new_mappers = False
|
||||
_already_compiling = False
|
||||
|
||||
_memoized_configured_property = util.group_expirable_memoized_property()
|
||||
@ -48,8 +55,6 @@ NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
|
||||
_CONFIGURE_MUTEX = util.threading.RLock()
|
||||
|
||||
|
||||
@inspection._self_inspects
|
||||
@log.class_logger
|
||||
class Mapper(_InspectionAttr):
|
||||
"""Define the correlation of class attributes to database table
|
||||
columns.
|
||||
@ -82,12 +87,9 @@ class Mapper(_InspectionAttr):
|
||||
|
||||
|
||||
"""
|
||||
|
||||
_new_mappers = False
|
||||
|
||||
def __init__(self,
|
||||
class_,
|
||||
local_table=None,
|
||||
local_table,
|
||||
properties=None,
|
||||
primary_key=None,
|
||||
non_primary=False,
|
||||
@ -110,400 +112,14 @@ class Mapper(_InspectionAttr):
|
||||
include_properties=None,
|
||||
exclude_properties=None,
|
||||
passive_updates=True,
|
||||
confirm_deleted_rows=True,
|
||||
eager_defaults=False,
|
||||
legacy_is_orphan=False,
|
||||
_compiled_cache_size=100,
|
||||
):
|
||||
"""Return a new :class:`~.Mapper` object.
|
||||
"""Construct a new mapper.
|
||||
|
||||
This function is typically used behind the scenes
|
||||
via the Declarative extension. When using Declarative,
|
||||
many of the usual :func:`.mapper` arguments are handled
|
||||
by the Declarative extension itself, including ``class_``,
|
||||
``local_table``, ``properties``, and ``inherits``.
|
||||
Other options are passed to :func:`.mapper` using
|
||||
the ``__mapper_args__`` class variable::
|
||||
|
||||
class MyClass(Base):
|
||||
__tablename__ = 'my_table'
|
||||
id = Column(Integer, primary_key=True)
|
||||
type = Column(String(50))
|
||||
alt = Column("some_alt", Integer)
|
||||
|
||||
__mapper_args__ = {
|
||||
'polymorphic_on' : type
|
||||
}
|
||||
|
||||
|
||||
Explicit use of :func:`.mapper`
|
||||
is often referred to as *classical mapping*. The above
|
||||
declarative example is equivalent in classical form to::
|
||||
|
||||
my_table = Table("my_table", metadata,
|
||||
Column('id', Integer, primary_key=True),
|
||||
Column('type', String(50)),
|
||||
Column("some_alt", Integer)
|
||||
)
|
||||
|
||||
class MyClass(object):
|
||||
pass
|
||||
|
||||
mapper(MyClass, my_table,
|
||||
polymorphic_on=my_table.c.type,
|
||||
properties={
|
||||
'alt':my_table.c.some_alt
|
||||
})
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`classical_mapping` - discussion of direct usage of
|
||||
:func:`.mapper`
|
||||
|
||||
:param class\_: The class to be mapped. When using Declarative,
|
||||
this argument is automatically passed as the declared class
|
||||
itself.
|
||||
|
||||
:param local_table: The :class:`.Table` or other selectable
|
||||
to which the class is mapped. May be ``None`` if
|
||||
this mapper inherits from another mapper using single-table
|
||||
inheritance. When using Declarative, this argument is
|
||||
automatically passed by the extension, based on what
|
||||
is configured via the ``__table__`` argument or via the
|
||||
:class:`.Table` produced as a result of the ``__tablename__``
|
||||
and :class:`.Column` arguments present.
|
||||
|
||||
:param always_refresh: If True, all query operations for this mapped
|
||||
class will overwrite all data within object instances that already
|
||||
exist within the session, erasing any in-memory changes with
|
||||
whatever information was loaded from the database. Usage of this
|
||||
flag is highly discouraged; as an alternative, see the method
|
||||
:meth:`.Query.populate_existing`.
|
||||
|
||||
:param allow_partial_pks: Defaults to True. Indicates that a
|
||||
composite primary key with some NULL values should be considered as
|
||||
possibly existing within the database. This affects whether a
|
||||
mapper will assign an incoming row to an existing identity, as well
|
||||
as if :meth:`.Session.merge` will check the database first for a
|
||||
particular primary key value. A "partial primary key" can occur if
|
||||
one has mapped to an OUTER JOIN, for example.
|
||||
|
||||
:param batch: Defaults to ``True``, indicating that save operations
|
||||
of multiple entities can be batched together for efficiency.
|
||||
Setting to False indicates
|
||||
that an instance will be fully saved before saving the next
|
||||
instance. This is used in the extremely rare case that a
|
||||
:class:`.MapperEvents` listener requires being called
|
||||
in between individual row persistence operations.
|
||||
|
||||
:param column_prefix: A string which will be prepended
|
||||
to the mapped attribute name when :class:`.Column`
|
||||
objects are automatically assigned as attributes to the
|
||||
mapped class. Does not affect explicitly specified
|
||||
column-based properties.
|
||||
|
||||
See the section :ref:`column_prefix` for an example.
|
||||
|
||||
:param concrete: If True, indicates this mapper should use concrete
|
||||
table inheritance with its parent mapper.
|
||||
|
||||
See the section :ref:`concrete_inheritance` for an example.
|
||||
|
||||
:param confirm_deleted_rows: defaults to True; when a DELETE occurs
|
||||
of one more rows based on specific primary keys, a warning is
|
||||
emitted when the number of rows matched does not equal the number
|
||||
of rows expected. This parameter may be set to False to handle the case
|
||||
where database ON DELETE CASCADE rules may be deleting some of those
|
||||
rows automatically. The warning may be changed to an exception
|
||||
in a future release.
|
||||
|
||||
.. versionadded:: 0.9.4 - added :paramref:`.mapper.confirm_deleted_rows`
|
||||
as well as conditional matched row checking on delete.
|
||||
|
||||
:param eager_defaults: if True, the ORM will immediately fetch the
|
||||
value of server-generated default values after an INSERT or UPDATE,
|
||||
rather than leaving them as expired to be fetched on next access.
|
||||
This can be used for event schemes where the server-generated values
|
||||
are needed immediately before the flush completes. By default,
|
||||
this scheme will emit an individual ``SELECT`` statement per row
|
||||
inserted or updated, which note can add significant performance
|
||||
overhead. However, if the
|
||||
target database supports :term:`RETURNING`, the default values will be
|
||||
returned inline with the INSERT or UPDATE statement, which can
|
||||
greatly enhance performance for an application that needs frequent
|
||||
access to just-generated server defaults.
|
||||
|
||||
.. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
|
||||
make use of :term:`RETURNING` for backends which support it.
|
||||
|
||||
:param exclude_properties: A list or set of string column names to
|
||||
be excluded from mapping.
|
||||
|
||||
See :ref:`include_exclude_cols` for an example.
|
||||
|
||||
:param extension: A :class:`.MapperExtension` instance or
|
||||
list of :class:`.MapperExtension` instances which will be applied
|
||||
to all operations by this :class:`.Mapper`. **Deprecated.**
|
||||
Please see :class:`.MapperEvents`.
|
||||
|
||||
:param include_properties: An inclusive list or set of string column
|
||||
names to map.
|
||||
|
||||
See :ref:`include_exclude_cols` for an example.
|
||||
|
||||
:param inherits: A mapped class or the corresponding :class:`.Mapper`
|
||||
of one indicating a superclass to which this :class:`.Mapper`
|
||||
should *inherit* from. The mapped class here must be a subclass
|
||||
of the other mapper's class. When using Declarative, this argument
|
||||
is passed automatically as a result of the natural class
|
||||
hierarchy of the declared classes.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`inheritance_toplevel`
|
||||
|
||||
:param inherit_condition: For joined table inheritance, a SQL
|
||||
expression which will
|
||||
define how the two tables are joined; defaults to a natural join
|
||||
between the two tables.
|
||||
|
||||
:param inherit_foreign_keys: When ``inherit_condition`` is used and the
|
||||
columns present are missing a :class:`.ForeignKey` configuration,
|
||||
this parameter can be used to specify which columns are "foreign".
|
||||
In most cases can be left as ``None``.
|
||||
|
||||
:param legacy_is_orphan: Boolean, defaults to ``False``.
|
||||
When ``True``, specifies that "legacy" orphan consideration
|
||||
is to be applied to objects mapped by this mapper, which means
|
||||
that a pending (that is, not persistent) object is auto-expunged
|
||||
from an owning :class:`.Session` only when it is de-associated
|
||||
from *all* parents that specify a ``delete-orphan`` cascade towards
|
||||
this mapper. The new default behavior is that the object is auto-expunged
|
||||
when it is de-associated with *any* of its parents that specify
|
||||
``delete-orphan`` cascade. This behavior is more consistent with
|
||||
that of a persistent object, and allows behavior to be consistent
|
||||
in more scenarios independently of whether or not an orphanable
|
||||
object has been flushed yet or not.
|
||||
|
||||
See the change note and example at :ref:`legacy_is_orphan_addition`
|
||||
for more detail on this change.
|
||||
|
||||
.. versionadded:: 0.8 - the consideration of a pending object as
|
||||
an "orphan" has been modified to more closely match the
|
||||
behavior as that of persistent objects, which is that the object
|
||||
is expunged from the :class:`.Session` as soon as it is
|
||||
de-associated from any of its orphan-enabled parents. Previously,
|
||||
the pending object would be expunged only if de-associated
|
||||
from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
|
||||
is added to :func:`.orm.mapper` which re-establishes the
|
||||
legacy behavior.
|
||||
|
||||
:param non_primary: Specify that this :class:`.Mapper` is in addition
|
||||
to the "primary" mapper, that is, the one used for persistence.
|
||||
The :class:`.Mapper` created here may be used for ad-hoc
|
||||
mapping of the class to an alternate selectable, for loading
|
||||
only.
|
||||
|
||||
:paramref:`.Mapper.non_primary` is not an often used option, but
|
||||
is useful in some specific :func:`.relationship` cases.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`relationship_non_primary_mapper`
|
||||
|
||||
:param order_by: A single :class:`.Column` or list of :class:`.Column`
|
||||
objects for which selection operations should use as the default
|
||||
ordering for entities. By default mappers have no pre-defined
|
||||
ordering.
|
||||
|
||||
:param passive_updates: Indicates UPDATE behavior of foreign key
|
||||
columns when a primary key column changes on a joined-table
|
||||
inheritance mapping. Defaults to ``True``.
|
||||
|
||||
When True, it is assumed that ON UPDATE CASCADE is configured on
|
||||
the foreign key in the database, and that the database will handle
|
||||
propagation of an UPDATE from a source column to dependent columns
|
||||
on joined-table rows.
|
||||
|
||||
When False, it is assumed that the database does not enforce
|
||||
referential integrity and will not be issuing its own CASCADE
|
||||
operation for an update. The unit of work process will
|
||||
emit an UPDATE statement for the dependent columns during a
|
||||
primary key change.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`passive_updates` - description of a similar feature as
|
||||
used with :func:`.relationship`
|
||||
|
||||
:param polymorphic_on: Specifies the column, attribute, or
|
||||
SQL expression used to determine the target class for an
|
||||
incoming row, when inheriting classes are present.
|
||||
|
||||
This value is commonly a :class:`.Column` object that's
|
||||
present in the mapped :class:`.Table`::
|
||||
|
||||
class Employee(Base):
|
||||
__tablename__ = 'employee'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
discriminator = Column(String(50))
|
||||
|
||||
__mapper_args__ = {
|
||||
"polymorphic_on":discriminator,
|
||||
"polymorphic_identity":"employee"
|
||||
}
|
||||
|
||||
It may also be specified
|
||||
as a SQL expression, as in this example where we
|
||||
use the :func:`.case` construct to provide a conditional
|
||||
approach::
|
||||
|
||||
class Employee(Base):
|
||||
__tablename__ = 'employee'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
discriminator = Column(String(50))
|
||||
|
||||
__mapper_args__ = {
|
||||
"polymorphic_on":case([
|
||||
(discriminator == "EN", "engineer"),
|
||||
(discriminator == "MA", "manager"),
|
||||
], else_="employee"),
|
||||
"polymorphic_identity":"employee"
|
||||
}
|
||||
|
||||
It may also refer to any attribute
|
||||
configured with :func:`.column_property`, or to the
|
||||
string name of one::
|
||||
|
||||
class Employee(Base):
|
||||
__tablename__ = 'employee'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
discriminator = Column(String(50))
|
||||
employee_type = column_property(
|
||||
case([
|
||||
(discriminator == "EN", "engineer"),
|
||||
(discriminator == "MA", "manager"),
|
||||
], else_="employee")
|
||||
)
|
||||
|
||||
__mapper_args__ = {
|
||||
"polymorphic_on":employee_type,
|
||||
"polymorphic_identity":"employee"
|
||||
}
|
||||
|
||||
.. versionchanged:: 0.7.4
|
||||
``polymorphic_on`` may be specified as a SQL expression,
|
||||
or refer to any attribute configured with
|
||||
:func:`.column_property`, or to the string name of one.
|
||||
|
||||
When setting ``polymorphic_on`` to reference an
|
||||
attribute or expression that's not present in the
|
||||
locally mapped :class:`.Table`, yet the value
|
||||
of the discriminator should be persisted to the database,
|
||||
the value of the
|
||||
discriminator is not automatically set on new
|
||||
instances; this must be handled by the user,
|
||||
either through manual means or via event listeners.
|
||||
A typical approach to establishing such a listener
|
||||
looks like::
|
||||
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.orm import object_mapper
|
||||
|
||||
@event.listens_for(Employee, "init", propagate=True)
|
||||
def set_identity(instance, *arg, **kw):
|
||||
mapper = object_mapper(instance)
|
||||
instance.discriminator = mapper.polymorphic_identity
|
||||
|
||||
Where above, we assign the value of ``polymorphic_identity``
|
||||
for the mapped class to the ``discriminator`` attribute,
|
||||
thus persisting the value to the ``discriminator`` column
|
||||
in the database.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`inheritance_toplevel`
|
||||
|
||||
:param polymorphic_identity: Specifies the value which
|
||||
identifies this particular class as returned by the
|
||||
column expression referred to by the ``polymorphic_on``
|
||||
setting. As rows are received, the value corresponding
|
||||
to the ``polymorphic_on`` column expression is compared
|
||||
to this value, indicating which subclass should
|
||||
be used for the newly reconstructed object.
|
||||
|
||||
:param properties: A dictionary mapping the string names of object
|
||||
attributes to :class:`.MapperProperty` instances, which define the
|
||||
persistence behavior of that attribute. Note that :class:`.Column`
|
||||
objects present in
|
||||
the mapped :class:`.Table` are automatically placed into
|
||||
``ColumnProperty`` instances upon mapping, unless overridden.
|
||||
When using Declarative, this argument is passed automatically,
|
||||
based on all those :class:`.MapperProperty` instances declared
|
||||
in the declared class body.
|
||||
|
||||
:param primary_key: A list of :class:`.Column` objects which define the
|
||||
primary key to be used against this mapper's selectable unit.
|
||||
This is normally simply the primary key of the ``local_table``, but
|
||||
can be overridden here.

        :param version_id_col: A :class:`.Column`
           that will be used to keep a running version id of rows
           in the table.  This is used to detect concurrent updates or
           the presence of stale data in a flush.  The methodology is to
           detect when an UPDATE statement does not match the last known
           version id; in that case a
           :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
           thrown.
           By default, the column must be of :class:`.Integer` type,
           unless ``version_id_generator`` specifies an alternative version
           generator.

           .. seealso::

              :ref:`mapper_version_counter` - discussion of version counting
              and rationale.
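
           A minimal sketch of the default integer scheme (hypothetical
           ``Widget`` class, assuming the usual Declarative ``Base``)::

               class Widget(Base):
                   __tablename__ = 'widget'

                   id = Column(Integer, primary_key=True)
                   # hypothetical column; the ORM starts the counter
                   # at 1 and increments it on each UPDATE of the row
                   version_id = Column(Integer, nullable=False)

                   __mapper_args__ = {"version_id_col": version_id}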

        :param version_id_generator: Define how new version ids should
          be generated.  Defaults to ``None``, which indicates that
          a simple integer counting scheme be employed.  To provide a custom
          versioning scheme, provide a callable function of the form::

              def generate_version(version):
                  return next_version

          Alternatively, server-side versioning functions such as triggers,
          or programmatic versioning schemes outside of the version id generator
          may be used, by specifying the value ``False``.
          Please see :ref:`server_side_version_counter` for a discussion
          of important points when using this option.

          .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
             version number generation.

          .. seealso::

             :ref:`custom_version_counter`

             :ref:`server_side_version_counter`
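
          As a sketch of the callable form (hypothetical ``Widget``
          class; the generator ignores the previous value and stamps a
          fresh uuid onto each UPDATE)::

              import uuid

              class Widget(Base):
                  __tablename__ = 'widget'

                  id = Column(Integer, primary_key=True)
                  version_uuid = Column(String(32))

                  __mapper_args__ = {
                      'version_id_col': version_uuid,
                      'version_id_generator':
                          lambda version: uuid.uuid4().hex
                  }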

        :param with_polymorphic: A tuple in the form ``(<classes>,
            <selectable>)`` indicating the default style of "polymorphic"
            loading, that is, which tables are queried at once. <classes> is
            any single or list of mappers and/or classes indicating the
            inherited classes that should be loaded at once. The special value
            ``'*'`` may be used to indicate all descending classes should be
            loaded immediately. The second tuple argument <selectable>
            indicates a selectable that will be used to query for multiple
            classes.

            .. seealso::

              :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
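
            For example, a classical mapping that queries all subclass
            tables up front (a sketch; ``Employee`` and the ``employee``
            table are hypothetical)::

                mapper(Employee, employee,
                       polymorphic_on=employee.c.type,
                       polymorphic_identity='employee',
                       # '*' loads every descending class at once;
                       # None means no custom selectable is needed
                       with_polymorphic=('*', None))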

        Mappers are normally constructed via the
        :func:`~sqlalchemy.orm.mapper` function.  See that function for details.

        """

@ -520,19 +136,9 @@ class Mapper(_InspectionAttr):
        self.order_by = order_by

        self.always_refresh = always_refresh

        if isinstance(version_id_col, MapperProperty):
            self.version_id_prop = version_id_col
            self.version_id_col = None
        else:
            self.version_id_col = version_id_col
        if version_id_generator is False:
            self.version_id_generator = False
        elif version_id_generator is None:
            self.version_id_generator = lambda x: (x or 0) + 1
        else:
            self.version_id_generator = version_id_generator

        self.version_id_col = version_id_col
        self.version_id_generator = version_id_generator or \
            (lambda x: (x or 0) + 1)
        self.concrete = concrete
        self.single = False
        self.inherits = inherits
@ -557,12 +163,8 @@ class Mapper(_InspectionAttr):
        self._compiled_cache_size = _compiled_cache_size
        self._reconstructor = None
        self._deprecated_extensions = util.to_list(extension or [])
        self.allow_partial_pks = allow_partial_pks

        if self.inherits and not self.concrete:
            self.confirm_deleted_rows = False
        else:
            self.confirm_deleted_rows = confirm_deleted_rows
        self.allow_partial_pks = allow_partial_pks

        self._set_with_polymorphic(with_polymorphic)

@ -609,7 +211,7 @@ class Mapper(_InspectionAttr):
        # configure_mappers() until construction succeeds)
        _CONFIGURE_MUTEX.acquire()
        try:
            self.dispatch._events._new_mapper_instance(class_, self)
            events._MapperEventsHold.populate(class_, self)
            self._configure_inheritance()
            self._configure_legacy_instrument_class()
            self._configure_class_instrumentation()
@ -617,7 +219,8 @@ class Mapper(_InspectionAttr):
            self._configure_properties()
            self._configure_polymorphic_setter()
            self._configure_pks()
            Mapper._new_mappers = True
            global _new_mappers
            _new_mappers = True
            self._log("constructed")
            self._expire_memoizations()
        finally:
@ -874,6 +477,8 @@ class Mapper(_InspectionAttr):
    c = None
    """A synonym for :attr:`~.Mapper.columns`."""

    dispatch = event.dispatcher(events.MapperEvents)

    @util.memoized_property
    def _path_registry(self):
        return PathRegistry.per_mapper(self)
@ -913,7 +518,7 @@ class Mapper(_InspectionAttr):
            if self.inherit_condition is None:
                # figure out inherit condition from our table to the
                # immediate table of the inherited mapper, not its
                # full table which could pull in other stuff we don't
                # full table which could pull in other stuff we dont
                # want (allows test/inheritance.InheritTest4 to pass)
                self.inherit_condition = sql_util.join_condition(
                    self.inherits.local_table,
@ -982,7 +587,7 @@ class Mapper(_InspectionAttr):
        if with_polymorphic == '*':
            self.with_polymorphic = ('*', None)
        elif isinstance(with_polymorphic, (tuple, list)):
            if isinstance(with_polymorphic[0], util.string_types + (tuple, list)):
            if isinstance(with_polymorphic[0], (basestring, tuple, list)):
                self.with_polymorphic = with_polymorphic
            else:
                self.with_polymorphic = (with_polymorphic, None)
@ -1027,7 +632,7 @@ class Mapper(_InspectionAttr):
            self.inherits._inheriting_mappers.append(self)
            self.passive_updates = self.inherits.passive_updates
            self._all_tables = self.inherits._all_tables
            for key, prop in mapper._props.items():
            for key, prop in mapper._props.iteritems():
                if key not in self._props and \
                    not self._should_exclude(key, key, local=False,
                                             column=None):
@ -1129,20 +734,30 @@ class Mapper(_InspectionAttr):
                self._reconstructor = method
                event.listen(manager, 'load', _event_on_load, raw=True)
            elif hasattr(method, '__sa_validators__'):
                validation_opts = method.__sa_validation_opts__
                include_removes = getattr(method,
                                          "__sa_include_removes__", False)
                for name in method.__sa_validators__:
                    self.validators = self.validators.union(
                        {name: (method, validation_opts)}
                        {name: (method, include_removes)}
                    )

        manager.info[_INSTRUMENTOR] = self

    @util.deprecated("0.7", message=":meth:`.Mapper.compile` "
                     "is replaced by :func:`.configure_mappers`")
    def compile(self):
        """Initialize the inter-mapper relationships of all mappers that
        have been constructed thus far.

    @classmethod
    def _configure_all(cls):
        """Class-level path to the :func:`.configure_mappers` call.
        """
        configure_mappers()
        return self

    @property
    @util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
                     "is replaced by :attr:`.Mapper.configured`")
    def compiled(self):
        return self.configured

    def dispose(self):
        # Disable any attribute-based compilation.
@ -1257,12 +872,12 @@ class Mapper(_InspectionAttr):

        # load custom properties
        if self._init_properties:
            for key, prop in self._init_properties.items():
            for key, prop in self._init_properties.iteritems():
                self._configure_property(key, prop, False)

        # pull properties from the inherited mapper if any.
        if self.inherits:
            for key, prop in self.inherits._props.items():
            for key, prop in self.inherits._props.iteritems():
                if key not in self._props and \
                    not self._should_exclude(key, key, local=False,
                                             column=None):
@ -1310,8 +925,8 @@ class Mapper(_InspectionAttr):
        if self.polymorphic_on is not None:
            setter = True

            if isinstance(self.polymorphic_on, util.string_types):
                # polymorphic_on specified as a string - link
            if isinstance(self.polymorphic_on, basestring):
                # polymorphic_on specified as as string - link
                # it to mapped ColumnProperty
                try:
                    self.polymorphic_on = self._props[self.polymorphic_on]
@ -1340,7 +955,7 @@ class Mapper(_InspectionAttr):
                prop = self.polymorphic_on
                self.polymorphic_on = prop.columns[0]
                polymorphic_key = prop.key
            elif not expression._is_column(self.polymorphic_on):
            elif not expression.is_column(self.polymorphic_on):
                # polymorphic_on is not a Column and not a ColumnProperty;
                # not supported right now.
                raise sa_exc.ArgumentError(
@ -1463,13 +1078,6 @@ class Mapper(_InspectionAttr):

    _validate_polymorphic_identity = None

    @_memoized_configured_property
    def _version_id_prop(self):
        if self.version_id_col is not None:
            return self._columntoproperty[self.version_id_col]
        else:
            return None

    @_memoized_configured_property
    def _acceptable_polymorphic_identities(self):
        identities = set()
@ -1596,7 +1204,7 @@ class Mapper(_InspectionAttr):
            # generate a properties.ColumnProperty
            columns = util.to_list(prop)
            column = columns[0]
            if not expression._is_column(column):
            if not expression.is_column(column):
                raise sa_exc.ArgumentError(
                    "%s=%r is not an instance of MapperProperty or Column"
                    % (key, prop))
@ -1665,7 +1273,7 @@ class Mapper(_InspectionAttr):
        """

        self._log("_post_configure_properties() started")
        l = [(key, prop) for key, prop in self._props.items()]
        l = [(key, prop) for key, prop in self._props.iteritems()]
        for key, prop in l:
            self._log("initialize prop %s", key)

@ -1683,7 +1291,7 @@ class Mapper(_InspectionAttr):
        using `add_property`.

        """
        for key, value in dict_of_properties.items():
        for key, value in dict_of_properties.iteritems():
            self.add_property(key, value)

    def add_property(self, key, prop):
@ -1760,7 +1368,7 @@ class Mapper(_InspectionAttr):
        """return a MapperProperty associated with the given key.
        """

        if _configure_mappers and Mapper._new_mappers:
        if _configure_mappers and _new_mappers:
            configure_mappers()

        try:
@ -1778,9 +1386,9 @@ class Mapper(_InspectionAttr):
    @property
    def iterate_properties(self):
        """return an iterator of all MapperProperty objects."""
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()
        return iter(self._props.values())
        return self._props.itervalues()

    def _mappers_from_spec(self, spec, selectable):
        """given a with_polymorphic() argument, return the set of mappers it
@ -1852,7 +1460,7 @@ class Mapper(_InspectionAttr):

    @_memoized_configured_property
    def _with_polymorphic_mappers(self):
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()
        if not self.with_polymorphic:
            return []
@ -1959,7 +1567,7 @@ class Mapper(_InspectionAttr):
            :attr:`.Mapper.all_orm_descriptors`

        """
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()
        return util.ImmutableProperties(self._props)

@ -2008,7 +1616,7 @@ class Mapper(_InspectionAttr):
        objects.

        """
        return self._filter_properties(properties.SynonymProperty)
        return self._filter_properties(descriptor_props.SynonymProperty)

    @_memoized_configured_property
    def column_attrs(self):
@ -2047,13 +1655,13 @@ class Mapper(_InspectionAttr):
        objects.

        """
        return self._filter_properties(properties.CompositeProperty)
        return self._filter_properties(descriptor_props.CompositeProperty)

    def _filter_properties(self, type_):
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()
        return util.ImmutableProperties(util.OrderedDict(
            (k, v) for k, v in self._props.items()
            (k, v) for k, v in self._props.iteritems()
            if isinstance(v, type_)
        ))

@ -2463,7 +2071,7 @@ class Mapper(_InspectionAttr):
                # attempt to skip dependencies that are not
                # significant to the inheritance chain
                # for two tables that are related by inheritance.
                # while that dependency may be important, it's technically
                # while that dependency may be important, it's techinically
                # not what we mean to sort on here.
                parent = table_to_mapper.get(fk.parent.table)
                dep = table_to_mapper.get(fk.column.table)
@ -2471,16 +2079,16 @@ class Mapper(_InspectionAttr):
                    dep is not None and \
                    dep is not parent and \
                    dep.inherit_condition is not None:
                cols = set(sql_util._find_columns(dep.inherit_condition))
                cols = set(sql_util.find_columns(dep.inherit_condition))
                if parent.inherit_condition is not None:
                    cols = cols.union(sql_util._find_columns(
                    cols = cols.union(sql_util.find_columns(
                        parent.inherit_condition))
                    return fk.parent not in cols and fk.column not in cols
                else:
                    return fk.parent not in cols
            return False

        sorted_ = sql_util.sort_tables(table_to_mapper,
        sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys(),
                                       skip_fn=skip,
                                       extra_dependencies=extra_dependencies)

@ -2515,6 +2123,9 @@ class Mapper(_InspectionAttr):

        return result

inspection._self_inspects(Mapper)
log.class_logger(Mapper)


def configure_mappers():
    """Initialize the inter-mapper relationships of all mappers that
@ -2525,9 +2136,11 @@ def configure_mappers():

    """

    if not Mapper._new_mappers:
    global _new_mappers
    if not _new_mappers:
        return

    _call_configured = None
    _CONFIGURE_MUTEX.acquire()
    try:
        global _already_compiling
@ -2537,15 +2150,13 @@ def configure_mappers():
        try:

            # double-check inside mutex
            if not Mapper._new_mappers:
            if not _new_mappers:
                return

            Mapper.dispatch(Mapper).before_configured()
            # initialize properties on all mappers
            # note that _mapper_registry is unordered, which
            # may randomly conceal/reveal issues related to
            # the order of mapper compilation

            for mapper in list(_mapper_registry):
                if getattr(mapper, '_configure_failed', False):
                    e = sa_exc.InvalidRequestError(
@ -2561,18 +2172,20 @@ def configure_mappers():
                    mapper._expire_memoizations()
                    mapper.dispatch.mapper_configured(
                        mapper, mapper.class_)
                    _call_configured = mapper
                except:
                    exc = sys.exc_info()[1]
                    if not hasattr(exc, '_configure_failed'):
                        mapper._configure_failed = exc
                    raise

            Mapper._new_mappers = False
            _new_mappers = False
        finally:
            _already_compiling = False
    finally:
        _CONFIGURE_MUTEX.release()
    Mapper.dispatch(Mapper).after_configured()
    if _call_configured is not None:
        _call_configured.dispatch.after_configured()


def reconstructor(fn):
@ -2618,28 +2231,13 @@ def validates(*names, **kw):
        argument "is_remove" which will be a boolean.

        .. versionadded:: 0.7.7

    :param include_backrefs: defaults to ``True``; if ``False``, the
       validation function will not emit if the originator is an attribute
       event related via a backref.  This can be used for bi-directional
       :func:`.validates` usage where only one validator should emit per
       attribute operation.

       .. versionadded:: 0.9.0

    .. seealso::

        :ref:`simple_validators` - usage examples for :func:`.validates`
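
    As a sketch of basic usage (hypothetical ``User`` class, assuming a
    Declarative ``Base``)::

        class User(Base):
            __tablename__ = 'user'

            id = Column(Integer, primary_key=True)
            email = Column(String(100))

            @validates('email')
            def validate_email(self, key, address):
                # hypothetical check; the return value is what is
                # ultimately assigned to the attribute
                assert '@' in address
                return address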

    """
    include_removes = kw.pop('include_removes', False)
    include_backrefs = kw.pop('include_backrefs', True)

    def wrap(fn):
        fn.__sa_validators__ = names
        fn.__sa_validation_opts__ = {
            "include_removes": include_removes,
            "include_backrefs": include_backrefs
        }
        fn.__sa_include_removes__ = include_removes
        return fn
    return wrap

@ -2660,7 +2258,7 @@ def _event_on_first_init(manager, cls):

    instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
    if instrumenting_mapper:
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()


@ -2675,7 +2273,7 @@ def _event_on_init(state, args, kwargs):

    instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
    if instrumenting_mapper:
        if Mapper._new_mappers:
        if _new_mappers:
            configure_mappers()
        if instrumenting_mapper._set_polymorphic_identity:
            instrumenting_mapper._set_polymorphic_identity(state)

@ -1,261 +0,0 @@
# orm/path_registry.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.

"""

from .. import inspection
from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper


def _unreduce_path(path):
    return PathRegistry.deserialize(path)


_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"


class PathRegistry(object):
    """Represent query load paths and registry functions.

    Basically represents structures like:

    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)

    These structures are generated by things like
    query options (joinedload(), subqueryload(), etc.) and are
    used to compose keys stored in the query._attributes dictionary
    for various options.

    They are then re-composed at query compile/result row time as
    the query is formed and as rows are fetched, where they again
    serve to compose keys to look up options in the context.attributes
    dictionary, which is copied from query._attributes.

    The path structure has a limited amount of caching, where each
    "root" ultimately pulls from a fixed registry associated with
    the first mapper, that also contains elements for each of its
    property keys.  However paths longer than two elements, which
    are the exception rather than the rule, are generated on an
    as-needed basis.
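
    As a rough sketch of how a path is composed by indexing (hypothetical
    ``user_mapper``/``order_mapper`` names, not part of this module)::

        # each indexing step alternates entity -> property -> entity
        path = PathRegistry.root[user_mapper]
        path = path[user_mapper.attrs['orders']]
        path = path[order_mapper]
        # path.path is now (user_mapper, <orders property>, order_mapper)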

    """

    def __eq__(self, other):
        return other is not None and \
            self.path == other.path

    def set(self, attributes, key, value):
        attributes[(key, self.path)] = value

    def setdefault(self, attributes, key, value):
        attributes.setdefault((key, self.path), value)

    def get(self, attributes, key, value=None):
        key = (key, self.path)
        if key in attributes:
            return attributes[key]
        else:
            return value

    def __len__(self):
        return len(self.path)

    @property
    def length(self):
        return len(self.path)

    def pairs(self):
        path = self.path
        for i in range(0, len(path), 2):
            yield path[i], path[i + 1]

    def contains_mapper(self, mapper):
        for path_mapper in [
            self.path[i] for i in range(0, len(self.path), 2)
        ]:
            if path_mapper.is_mapper and \
                    path_mapper.isa(mapper):
                return True
        else:
            return False

    def contains(self, attributes, key):
        return (key, self.path) in attributes

    def __reduce__(self):
        return _unreduce_path, (self.serialize(), )

    def serialize(self):
        path = self.path
        return list(zip(
            [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
            [path[i].key for i in range(1, len(path), 2)] + [None]
        ))

    @classmethod
    def deserialize(cls, path):
        if path is None:
            return None

        p = tuple(chain(*[(class_mapper(mcls),
                           class_mapper(mcls).attrs[key]
                           if key is not None else None)
                          for mcls, key in path]))
        if p and p[-1] is None:
            p = p[0:-1]
        return cls.coerce(p)

    @classmethod
    def per_mapper(cls, mapper):
        return EntityRegistry(
            cls.root, mapper
        )

    @classmethod
    def coerce(cls, raw):
        return util.reduce(lambda prev, next: prev[next], raw, cls.root)

    def token(self, token):
        if token.endswith(':' + _WILDCARD_TOKEN):
            return TokenRegistry(self, token)
        elif token.endswith(":" + _DEFAULT_TOKEN):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError("invalid token: %s" % token)

    def __add__(self, other):
        return util.reduce(
            lambda prev, next: prev[next],
            other.path, self)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.path, )


class RootRegistry(PathRegistry):
    """Root registry, defers to mappers so that
    paths are maintained per-root-mapper.

    """
    path = ()
    has_entity = False

    def __getitem__(self, entity):
        return entity._path_registry

PathRegistry.root = RootRegistry()


class TokenRegistry(PathRegistry):
    def __init__(self, parent, token):
        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)

    has_entity = False

    def __getitem__(self, entity):
        raise NotImplementedError()


class PropRegistry(PathRegistry):
    def __init__(self, parent, prop):
        # restate this path in terms of the
        # given MapperProperty's parent.
        insp = inspection.inspect(parent[-1])
        if not insp.is_aliased_class or insp._use_mapper_path:
            parent = parent.parent[prop.parent]
        elif insp.is_aliased_class and insp.with_polymorphic_mappers:
            if prop.parent is not insp.mapper and \
                    prop.parent in insp.with_polymorphic_mappers:
                subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
                parent = parent.parent[subclass_entity]

        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)

    @util.memoized_property
    def has_entity(self):
        return hasattr(self.prop, "mapper")

    @util.memoized_property
    def entity(self):
        return self.prop.mapper

    @util.memoized_property
    def _wildcard_path_loader_key(self):
        """Given a path (mapper A, prop X), replace the prop with the wildcard,
        e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then
        return within the ("loader", path) structure.

        """
        return ("loader",
                self.parent.token(
                    "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
                ).path
                )

    @util.memoized_property
    def _default_path_loader_key(self):
        return ("loader",
                self.parent.token(
                    "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
                ).path
                )

    @util.memoized_property
    def _loader_key(self):
        return ("loader", self.path)

    @property
    def mapper(self):
        return self.entity

    @property
    def entity_path(self):
        return self[self.entity]

    def __getitem__(self, entity):
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return EntityRegistry(
                self, entity
            )


class EntityRegistry(PathRegistry, dict):
    is_aliased_class = False
    has_entity = True

    def __init__(self, parent, entity):
        self.key = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.entity = entity
        self.path = parent.path + (entity,)
        self.entity_path = self

    @property
    def mapper(self):
        return inspection.inspect(self.entity).mapper

    def __bool__(self):
        return True
    __nonzero__ = __bool__

    def __getitem__(self, entity):
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return dict.__getitem__(self, entity)

    def __missing__(self, key):
        self[key] = item = PropRegistry(self, key)
        return item


@ -17,7 +17,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
from .base import _state_mapper, state_str, _attr_as_key
from .util import _state_mapper, state_str, _attr_as_key
from ..sql import expression
from . import loading

@ -46,7 +46,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):

    cached_connections = _cached_connection_dict(base_mapper)

    for table, mapper in base_mapper._sorted_tables.items():
    for table, mapper in base_mapper._sorted_tables.iteritems():
        insert = _collect_insert_commands(base_mapper, uowtransaction,
                                          table, states_to_insert)

@ -61,7 +61,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
        if insert:
            _emit_insert_statements(base_mapper, uowtransaction,
                                    cached_connections,
                                    mapper, table, insert)
                                    table, insert)

    _finalize_insert_update_commands(base_mapper, uowtransaction,
                                     states_to_insert, states_to_update)
@ -78,7 +78,7 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
        base_mapper,
        states, uowtransaction)

    for table, mapper in base_mapper._sorted_tables.items():
    for table, mapper in base_mapper._sorted_tables.iteritems():
        update = _collect_post_update_commands(base_mapper, uowtransaction,
                                               table, states_to_update,
                                               post_update_cols)
@ -106,7 +106,7 @@ def delete_obj(base_mapper, states, uowtransaction):

    table_to_mapper = base_mapper._sorted_tables

    for table in reversed(list(table_to_mapper.keys())):
    for table in reversed(table_to_mapper.keys()):
        delete = _collect_delete_commands(base_mapper, uowtransaction,
                                          table, states_to_delete)

@ -246,12 +246,9 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
        value_params = {}

        has_all_pks = True
        has_all_defaults = True
        for col in mapper._cols_by_table[table]:
            if col is mapper.version_id_col and \
                    mapper.version_id_generator is not False:
                val = mapper.version_id_generator(None)
                params[col.key] = val
            if col is mapper.version_id_col:
                params[col.key] = mapper.version_id_generator(None)
            else:
                # pull straight from the dict for
                # pending objects
@ -264,9 +261,6 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
                    elif col.default is None and \
                            col.server_default is None:
                        params[col.key] = value
                    elif col.server_default is not None and \
                            mapper.base_mapper.eager_defaults:
                        has_all_defaults = False

                elif isinstance(value, sql.ClauseElement):
                    value_params[col] = value
@ -274,8 +268,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
                    params[col.key] = value

        insert.append((state, state_dict, params, mapper,
                       connection, value_params, has_all_pks,
                       has_all_defaults))
                       connection, value_params, has_all_pks))
    return insert


@ -322,20 +315,19 @@ def _collect_update_commands(base_mapper, uowtransaction,
                        params[col.key] = history.added[0]
                        hasdata = True
                else:
                    if mapper.version_id_generator is not False:
                        val = mapper.version_id_generator(params[col._label])
                        params[col.key] = val
                    params[col.key] = mapper.version_id_generator(
                        params[col._label])

                        # HACK: check for history, in case the
                        # history is only
                        # in a different table than the one
                        # where the version_id_col is.
                        for prop in mapper._columntoproperty.values():
                            history = attributes.get_state_history(
                                state, prop.key,
                                attributes.PASSIVE_NO_INITIALIZE)
                            if history.added:
                                hasdata = True
                    # HACK: check for history, in case the
                    # history is only
                    # in a different table than the one
                    # where the version_id_col is.
                    for prop in mapper._columntoproperty.itervalues():
                        history = attributes.get_state_history(
                            state, prop.key,
                            attributes.PASSIVE_NO_INITIALIZE)
                        if history.added:
                            hasdata = True
            else:
                prop = mapper._columntoproperty[col]
                history = attributes.get_state_history(
@ -417,7 +409,6 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
                    mapper._get_state_attr_by_column(
                        state,
                        state_dict, col)

            elif col in post_update_cols:
                prop = mapper._columntoproperty[col]
                history = attributes.get_state_history(
@ -487,13 +478,7 @@ def _emit_update_statements(base_mapper, uowtransaction,
                sql.bindparam(mapper.version_id_col._label,
                              type_=mapper.version_id_col.type))

        stmt = table.update(clause)
        if mapper.base_mapper.eager_defaults:
            stmt = stmt.return_defaults()
        elif mapper.version_id_col is not None:
            stmt = stmt.return_defaults(mapper.version_id_col)

        return stmt
        return table.update(clause)

    statement = base_mapper._memo(('update', table), update_stmt)

@ -515,7 +500,8 @@ def _emit_update_statements(base_mapper, uowtransaction,
                           table,
                           state,
                           state_dict,
                           c,
                           c.context.prefetch_cols,
                           c.context.postfetch_cols,
                           c.context.compiled_parameters[0],
                           value_params)
                rows += c.rowcount
@ -535,55 +521,44 @@ def _emit_update_statements(base_mapper, uowtransaction,


def _emit_insert_statements(base_mapper, uowtransaction,
                            cached_connections, mapper, table, insert):
                            cached_connections, table, insert):
    """Emit INSERT statements corresponding to value lists collected
    by _collect_insert_commands()."""

    statement = base_mapper._memo(('insert', table), table.insert)

    for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
    for (connection, pkeys, hasvalue, has_all_pks), \
        records in groupby(insert,
                           lambda rec: (rec[4],
                                        list(rec[2].keys()),
                                        rec[2].keys(),
                                        bool(rec[5]),
                                        rec[6], rec[7])
                                        rec[6])
                           ):
        if \
            (
                has_all_defaults
                or not base_mapper.eager_defaults
                or not connection.dialect.implicit_returning
            ) and has_all_pks and not hasvalue:

        if has_all_pks and not hasvalue:
            records = list(records)
            multiparams = [rec[2] for rec in records]

            c = cached_connections[connection].\
                execute(statement, multiparams)

            for (state, state_dict, params, mapper_rec,
                 conn, value_params, has_all_pks, has_all_defaults), \
            for (state, state_dict, params, mapper,
                 conn, value_params, has_all_pks), \
                last_inserted_params in \
                    zip(records, c.context.compiled_parameters):
                _postfetch(
                    mapper_rec,
                    mapper,
                    uowtransaction,
                    table,
                    state,
                    state_dict,
                    c,
                    c.context.prefetch_cols,
                    c.context.postfetch_cols,
                    last_inserted_params,
                    value_params)

        else:
            if not has_all_defaults and base_mapper.eager_defaults:
                statement = statement.return_defaults()
            elif mapper.version_id_col is not None:
                statement = statement.return_defaults(mapper.version_id_col)

            for state, state_dict, params, mapper_rec, \
            for state, state_dict, params, mapper, \
                    connection, value_params, \
                    has_all_pks, has_all_defaults in records:
                    has_all_pks in records:

                if value_params:
                    result = connection.execute(
@ -599,22 +574,23 @@ def _emit_insert_statements(base_mapper, uowtransaction,
                # set primary key attributes
                for pk, col in zip(primary_key,
                                   mapper._pks_by_table[table]):
                    prop = mapper_rec._columntoproperty[col]
                    prop = mapper._columntoproperty[col]
                    if state_dict.get(prop.key) is None:
                        # TODO: would rather say:
                        #state_dict[prop.key] = pk
                        mapper_rec._set_state_attr_by_column(
                        mapper._set_state_attr_by_column(
                            state,
                            state_dict,
                            col, pk)

                _postfetch(
                    mapper_rec,
                    mapper,
                    uowtransaction,
                    table,
                    state,
                    state_dict,
                    result,
                    result.context.prefetch_cols,
                    result.context.postfetch_cols,
                    result.context.compiled_parameters[0],
                    value_params)

@ -640,7 +616,7 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
    # also group them into common (connection, cols) sets
    # to support executemany().
    for key, grouper in groupby(
        update, lambda rec: (rec[4], list(rec[2].keys()))
        update, lambda rec: (rec[4], rec[2].keys())
    ):
        connection = key[0]
        multiparams = [params for state, state_dict,
@ -674,21 +650,26 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,

        return table.delete(clause)

    for connection, del_objects in delete.items():
    for connection, del_objects in delete.iteritems():
        statement = base_mapper._memo(('delete', table), delete_stmt)

        connection = cached_connections[connection]

        expected = len(del_objects)
        rows_matched = -1
        only_warn = False
        if connection.dialect.supports_sane_multi_rowcount:
            c = connection.execute(statement, del_objects)

            if not need_version_id:
                only_warn = True

            rows_matched = c.rowcount
            # only do a row check if we have versioning turned on.
            # unfortunately, we *cannot* do a check on the number of
            # rows matched here in general, as there is the edge case
            # of a table that has a self-referential foreign key with
            # ON DELETE CASCADE on it, see #2403.  I'm not sure how we can
            # resolve this, unless we require special configuration
            # to enable "count rows" for certain mappings, or to disable
            # it, or to based on it relationship(), not sure.
            if need_version_id:
                rows_matched = c.rowcount

        elif need_version_id:
            if connection.dialect.supports_sane_rowcount:
@ -708,24 +689,12 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
        else:
            connection.execute(statement, del_objects)

        if base_mapper.confirm_deleted_rows and \
                rows_matched > -1 and expected != rows_matched:
            if only_warn:
                util.warn(
                    "DELETE statement on table '%s' expected to "
                    "delete %d row(s); %d were matched.  Please set "
                    "confirm_deleted_rows=False within the mapper "
                    "configuration to prevent this warning." %
                    (table.description, expected, rows_matched)
                )
            else:
                raise orm_exc.StaleDataError(
                    "DELETE statement on table '%s' expected to "
                    "delete %d row(s); %d were matched.  Please set "
                    "confirm_deleted_rows=False within the mapper "
                    "configuration to prevent this warning." %
                    (table.description, expected, rows_matched)
                )
        if rows_matched > -1 and expected != rows_matched:
            raise orm_exc.StaleDataError(
                "DELETE statement on table '%s' expected to "
                "delete %d row(s); %d were matched." %
                (table.description, expected, rows_matched)
            )

def _finalize_insert_update_commands(base_mapper, uowtransaction,
                                     states_to_insert, states_to_update):
@ -745,25 +714,14 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
        if readonly:
            state._expire_attributes(state.dict, readonly)

        # if eager_defaults option is enabled, load
        # all expired cols.  Else if we have a version_id_col, make sure
        # it isn't expired.
        toload_now = []

        if base_mapper.eager_defaults:
            toload_now.extend(state._unloaded_non_object)
        elif mapper.version_id_col is not None and \
                mapper.version_id_generator is False:
            prop = mapper._columntoproperty[mapper.version_id_col]
            if prop.key in state.unloaded:
                toload_now.extend([prop.key])

        if toload_now:
        # if eager_defaults option is enabled,
        # refresh whatever has been expired.
        if base_mapper.eager_defaults and state.unloaded:
            state.key = base_mapper._identity_key_from_state(state)
            loading.load_on_ident(
                uowtransaction.session.query(base_mapper),
                state.key, refresh_state=state,
                only_load_props=toload_now)
                only_load_props=state.unloaded)

        # call after_XXX extensions
        if not has_identity:
@ -773,26 +731,15 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,


def _postfetch(mapper, uowtransaction, table,
               state, dict_, result, params, value_params):
               state, dict_, prefetch_cols, postfetch_cols,
               params, value_params):
    """Expire attributes in need of newly persisted database state,
    after an INSERT or UPDATE statement has proceeded for that
    state."""

    prefetch_cols = result.context.prefetch_cols
    postfetch_cols = result.context.postfetch_cols
    returning_cols = result.context.returning_cols

    if mapper.version_id_col is not None:
        prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]

    if returning_cols:
        row = result.context.returned_defaults
        if row is not None:
            for col in returning_cols:
                if col.primary_key:
                    continue
                mapper._set_state_attr_by_column(state, dict_, col, row[col])

    for c in prefetch_cols:
        if c.key in params and c in mapper._columntoproperty:
            mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
@ -866,10 +813,6 @@ class BulkUD(object):
    def __init__(self, query):
        self.query = query.enable_eagerloads(False)

    @property
    def session(self):
        return self.query.session

    @classmethod
    def _factory(cls, lookup, synchronize_session, *arg):
        try:
@ -878,7 +821,7 @@ class BulkUD(object):
            raise sa_exc.ArgumentError(
                "Valid strategies for session synchronization "
                "are %s" % (", ".join(sorted(repr(x)
                                             for x in lookup))))
                                             for x in lookup.keys()))))
        else:
            return klass(*arg)

@ -943,7 +886,7 @@ class BulkEvaluate(BulkUD):
        #TODO: detect when the where clause is a trivial primary key match
        self.matched_objects = [
            obj for (cls, pk), obj in
            query.session.identity_map.items()
            query.session.identity_map.iteritems()
            if issubclass(cls, target_cls) and
            eval_condition(obj)]

@ -987,7 +930,8 @@ class BulkUpdate(BulkUD):

    def _do_post(self):
        session = self.query.session
        session.dispatch.after_bulk_update(self)
        session.dispatch.after_bulk_update(session, self.query,
                                           self.context, self.result)


class BulkDelete(BulkUD):
@ -1015,7 +959,8 @ class BulkDelete(BulkUD):

    def _do_post(self):
        session = self.query.session
        session.dispatch.after_bulk_delete(self)
        session.dispatch.after_bulk_delete(session, self.query,
                                           self.context, self.result)


class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
@ -1024,7 +969,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):

    def _additional_evaluators(self, evaluator_compiler):
        self.value_evaluators = {}
        for key, value in self.values.items():
        for key, value in self.values.iteritems():
            key = _attr_as_key(key)
            self.value_evaluators[key] = evaluator_compiler.process(
                expression._literal_as_binds(value))
@ -1032,7 +977,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
    def _do_post_synchronize(self):
        session = self.query.session
        states = set()
        evaluated_keys = list(self.value_evaluators.keys())
        evaluated_keys = self.value_evaluators.keys()
        for obj in self.matched_objects:
            state, dict_ = attributes.instance_state(obj),\
                attributes.instance_dict(obj)

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large

@ -163,7 +163,7 @@ def makeprop(name):
    return property(get, set)

for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
             'is_active', 'autoflush', 'no_autoflush', 'info'):
             'is_active', 'autoflush', 'no_autoflush'):
    setattr(scoped_session, prop, makeprop(prop))


@ -5,43 +5,28 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides the Session class and related utilities."""


from __future__ import with_statement

import weakref
from .. import util, sql, engine, exc as sa_exc
from .. import util, sql, engine, exc as sa_exc, event
from ..sql import util as sql_util, expression
from . import (
    SessionExtension, attributes, exc, query,
    SessionExtension, attributes, exc, query, util as orm_util,
    loading, identity
)
from ..inspection import inspect
from .base import (
from .util import (
    object_mapper, class_mapper,
    _class_to_mapper, _state_mapper, object_state,
    _none_set, state_str, instance_str
    _none_set
)
from .unitofwork import UOWTransaction
from . import state as statelib
from .mapper import Mapper
from .events import SessionEvents
statelib = util.importlater("sqlalchemy.orm", "state")
import sys

__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']

_sessions = weakref.WeakValueDictionary()
"""Weak-referencing dictionary of :class:`.Session` objects.
"""


def _state_session(state):
    """Given an :class:`.InstanceState`, return the :class:`.Session`
    associated, if any.
    """
    if state.session_id:
        try:
            return _sessions[state.session_id]
        except KeyError:
            pass
    return None


class _SessionClassMethods(object):
    """Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@ -54,8 +39,7 @@ class _SessionClassMethods(object):
        sess.close()

    @classmethod
    @util.dependencies("sqlalchemy.orm.util")
    def identity_key(cls, orm_util, *args, **kwargs):
    def identity_key(cls, *args, **kwargs):
        """Return an identity key.

        This is an alias of :func:`.util.identity_key`.
@ -344,7 +328,7 @@ class SessionTransaction(object):
            subtransaction.commit()

        if not self.session._flushing:
            for _flush_guard in range(100):
            for _flush_guard in xrange(100):
                if self.session._is_clean():
                    break
                self.session.flush()
@ -485,7 +469,6 @@ class Session(_SessionClassMethods):
                 _enable_transaction_accounting=True,
                 autocommit=False, twophase=False,
                 weak_identity_map=True, binds=None, extension=None,
                 info=None,
                 query_cls=query.Query):
        """Construct a new Session.

@ -518,29 +501,27 @@ class Session(_SessionClassMethods):
            :ref:`session_autocommit`

        :param autoflush: When ``True``, all query operations will issue a
           :meth:`~.Session.flush` call to this ``Session`` before proceeding.
           This is a convenience feature so that :meth:`~.Session.flush` need
           not be called repeatedly in order for database queries to retrieve
           results.  It's typical that ``autoflush`` is used in conjunction with
           ``flush()`` call to this ``Session`` before proceeding.  This is a
           convenience feature so that ``flush()`` need not be called
           repeatedly in order for database queries to retrieve results.  It's
           typical that ``autoflush`` is used in conjunction with
           ``autocommit=False``.  In this scenario, explicit calls to
           :meth:`~.Session.flush` are rarely needed; you usually only need to
           call :meth:`~.Session.commit` (which flushes) to finalize changes.
           ``flush()`` are rarely needed; you usually only need to call
           ``commit()`` (which flushes) to finalize changes.
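
           A sketch of the typical configuration (hypothetical ``engine``;
           these are also the default settings)::

               Session = sessionmaker(bind=engine, autoflush=True,
                                      autocommit=False)
               session = Session()
               # pending changes flush before each query; they become
               # permanent only on session.commit()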

        :param bind: An optional :class:`.Engine` or :class:`.Connection` to
           which this ``Session`` should be bound. When specified, all SQL
           operations performed by this session will execute via this
           connectable.
        :param bind: An optional ``Engine`` or ``Connection`` to which this
           ``Session`` should be bound. When specified, all SQL operations
           performed by this session will execute via this connectable.

        :param binds: An optional dictionary which contains more granular
           "bind" information than the ``bind`` parameter provides. This
           dictionary can map individual :class:`.Table`
           instances as well as :class:`~.Mapper` instances to individual
           :class:`.Engine` or :class:`.Connection` objects. Operations which
           proceed relative to a particular :class:`.Mapper` will consult this
           dictionary for the direct :class:`.Mapper` instance as
           well as the mapper's ``mapped_table`` attribute in order to locate a
           connectable to use. The full resolution is described in the
           :meth:`.Session.get_bind`.
           dictionary can map individual ``Table`` instances as well as
           ``Mapper`` instances to individual ``Engine`` or ``Connection``
           objects. Operations which proceed relative to a particular
           ``Mapper`` will consult this dictionary for the direct ``Mapper``
           instance as well as the mapper's ``mapped_table`` attribute in
           order to locate a connectable to use. The full resolution is
           described in the ``get_bind()`` method of ``Session``.
           Usage looks like::

               Session = sessionmaker(binds={
@ -555,7 +536,7 @@ class Session(_SessionClassMethods):
        :param \class_: Specify an alternate class other than
           ``sqlalchemy.orm.session.Session`` which should be used by the
           returned class. This is the only argument that is local to the
           :class:`.sessionmaker` function, and is not sent directly to the
           ``sessionmaker()`` function, and is not sent directly to the
           constructor for ``Session``.

        :param _enable_transaction_accounting: Defaults to ``True``.  A
@ -563,12 +544,12 @@ class Session(_SessionClassMethods):
           object accounting on transaction boundaries, including auto-expiry
           of instances on rollback and commit, maintenance of the "new" and
           "deleted" lists upon rollback, and autoflush of pending changes upon
           :meth:`~.Session.begin`, all of which are interdependent.
           begin(), all of which are interdependent.

        :param expire_on_commit: Defaults to ``True``. When ``True``, all
           instances will be fully expired after each :meth:`~.commit`,
           so that all attribute/object access subsequent to a completed
           transaction will load from the most recent database state.
           instances will be fully expired after each ``commit()``, so that
           all attribute/object access subsequent to a completed transaction
           will load from the most recent database state.

        :param extension: An optional
           :class:`~.SessionExtension` instance, or a list
@ -576,26 +557,17 @@ class Session(_SessionClassMethods):
           flush events, as well as a post-rollback event. **Deprecated.**
           Please see :class:`.SessionEvents`.

        :param info: optional dictionary of arbitrary data to be associated
           with this :class:`.Session`.  Is available via the :attr:`.Session.info`
           attribute.  Note the dictionary is copied at construction time so
           that modifications to the per-:class:`.Session` dictionary will be local
           to that :class:`.Session`.

           .. versionadded:: 0.9.0
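
           A sketch of the 0.9-style usage being removed by this
           downgrade (the ``request_id`` key is arbitrary)::

               Session = sessionmaker(info={'request_id': None})
               session = Session()
               # the dictionary was copied at construction time, so
               # this change stays local to this session
               session.info['request_id'] = 42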

        :param query_cls: Class which should be used to create new Query
           objects, as returned by the :meth:`~.Session.query` method. Defaults
           to :class:`.Query`.
           objects, as returned by the ``query()`` method. Defaults to
           :class:`~sqlalchemy.orm.query.Query`.

        :param twophase: When ``True``, all transactions will be started as
           a "two phase" transaction, i.e. using the "two phase" semantics
           of the database in use along with an XID.  During a
           :meth:`~.commit`, after :meth:`~.flush` has been issued for all
           attached databases, the :meth:`~.TwoPhaseTransaction.prepare` method
           on each database's :class:`.TwoPhaseTransaction` will be called.
           This allows each database to roll back the entire transaction,
           before each transaction is committed.
           of the database in use along with an XID.  During a ``commit()``,
           after ``flush()`` has been issued for all attached databases, the
           ``prepare()`` method on each database's ``TwoPhaseTransaction``
           will be called. This allows each database to roll back the entire
           transaction, before each transaction is committed.
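
           For example (a sketch; ``SomeClass``, ``OtherClass`` and the
           two engines are hypothetical, and both databases must support
           two-phase "XA" transactions)::

               Session = sessionmaker(twophase=True, binds={
                   SomeClass: engine1,
                   OtherClass: engine2
               })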
|
||||
|
||||
:param weak_identity_map: Defaults to ``True`` - when set to
|
||||
``False``, objects placed in the :class:`.Session` will be
|
||||
@ -627,50 +599,31 @@ class Session(_SessionClassMethods):
|
||||
self._enable_transaction_accounting = _enable_transaction_accounting
|
||||
self.twophase = twophase
|
||||
self._query_cls = query_cls
|
||||
if info:
|
||||
self.info.update(info)
|
||||
|
||||
if extension:
|
||||
for ext in util.to_list(extension):
|
||||
SessionExtension._adapt_listener(self, ext)
|
||||
|
||||
if binds is not None:
|
||||
for mapperortable, bind in binds.items():
|
||||
insp = inspect(mapperortable)
|
||||
if insp.is_selectable:
|
||||
self.bind_table(mapperortable, bind)
|
||||
elif insp.is_mapper:
|
||||
for mapperortable, bind in binds.iteritems():
|
||||
if isinstance(mapperortable, (type, Mapper)):
|
||||
self.bind_mapper(mapperortable, bind)
|
||||
else:
|
||||
assert False
|
||||
|
||||
self.bind_table(mapperortable, bind)
|
||||
|
||||
if not self.autocommit:
|
||||
self.begin()
|
||||
_sessions[self.hash_key] = self
|
||||
|
||||
dispatch = event.dispatcher(SessionEvents)
|
||||
|
||||
connection_callable = None
|
||||
|
||||
transaction = None
|
||||
"""The current active or inactive :class:`.SessionTransaction`."""
|
||||
|
||||
@util.memoized_property
|
||||
def info(self):
|
||||
"""A user-modifiable dictionary.
|
||||
|
||||
The initial value of this dictioanry can be populated using the
|
||||
``info`` argument to the :class:`.Session` constructor or
|
||||
:class:`.sessionmaker` constructor or factory methods. The dictionary
|
||||
here is always local to this :class:`.Session` and can be modified
|
||||
independently of all other :class:`.Session` objects.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
"""
|
||||
return {}
|
||||
|
||||
def begin(self, subtransactions=False, nested=False):
|
||||
"""Begin a transaction on this :class:`.Session`.
|
||||
"""Begin a transaction on this Session.
|
||||
|
||||
If this Session is already within a transaction, either a plain
|
||||
transaction or nested transaction, an error is raised, unless
|
||||
@ -719,7 +672,7 @@ class Session(_SessionClassMethods):
|
||||
This method rolls back the current transaction or nested transaction
|
||||
regardless of subtransactions being in effect. All subtransactions up
|
||||
to the first real transaction are closed. Subtransactions occur when
|
||||
:meth:`.begin` is called multiple times.
|
||||
begin() is called multiple times.
|
||||
|
||||
.. seealso::
|
||||
|
||||
@ -826,7 +779,7 @@ class Session(_SessionClassMethods):
|
||||
etc.) which will be used to locate a bind, if a bind
|
||||
cannot otherwise be identified.
|
||||
|
||||
:param close_with_result: Passed to :meth:`.Engine.connect`, indicating
|
||||
:param close_with_result: Passed to :meth:`Engine.connect`, indicating
|
||||
the :class:`.Connection` should be considered "single use",
|
||||
automatically closing when the first result set is closed. This
|
||||
flag only has an effect if this :class:`.Session` is configured with
|
||||
@ -1016,7 +969,7 @@ class Session(_SessionClassMethods):
|
||||
self._deleted = {}
|
||||
|
||||
# TODO: need much more test coverage for bind_mapper() and similar !
|
||||
# TODO: + crystallize + document resolution order
|
||||
# TODO: + crystalize + document resolution order
|
||||
# vis. bind_mapper/bind_table
|
||||
|
||||
def bind_mapper(self, mapper, bind):
|
||||
@ -1026,7 +979,7 @@ class Session(_SessionClassMethods):
|
||||
A mapper instance or mapped class
|
||||
|
||||
bind
|
||||
Any Connectable: a :class:`.Engine` or :class:`.Connection`.
|
||||
Any Connectable: a ``Engine`` or ``Connection``.
|
||||
|
||||
All subsequent operations involving this mapper will use the given
|
||||
`bind`.
|
||||
@ -1043,12 +996,12 @@ class Session(_SessionClassMethods):
|
||||
"""Bind operations on a Table to a Connectable.
|
||||
|
||||
table
|
||||
A :class:`.Table` instance
|
||||
A ``Table`` instance
|
||||
|
||||
bind
|
||||
Any Connectable: a :class:`.Engine` or :class:`.Connection`.
|
||||
Any Connectable: a ``Engine`` or ``Connection``.
|
||||
|
||||
All subsequent operations involving this :class:`.Table` will use the
|
||||
All subsequent operations involving this ``Table`` will use the
|
||||
given `bind`.
|
||||
|
||||
"""
|
||||
@ -1149,8 +1102,7 @@ class Session(_SessionClassMethods):
|
||||
', '.join(context)))
|
||||
|
||||
def query(self, *entities, **kwargs):
|
||||
"""Return a new :class:`.Query` object corresponding to this
|
||||
:class:`.Session`."""
|
||||
"""Return a new ``Query`` object corresponding to this ``Session``."""
|
||||
|
||||
return self._query_cls(entities, self, **kwargs)
|
||||
|
||||
@ -1184,18 +1136,7 @@ class Session(_SessionClassMethods):

def _autoflush(self):
if self.autoflush and not self._flushing:
try:
self.flush()
except sa_exc.StatementError as e:
# note we are reraising StatementError as opposed to
# raising FlushError with "chaining" to remain compatible
# with code that catches StatementError, IntegrityError,
# etc.
e.add_detail(
"raised as a result of Query-invoked autoflush; "
"consider using a session.no_autoflush block if this "
"flush is occurring prematurely")
util.raise_from_cause(e)
self.flush()

def refresh(self, instance, attribute_names=None, lockmode=None):
"""Expire and refresh the attributes on the given instance.
@ -1224,14 +1165,6 @@ class Session(_SessionClassMethods):
:param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query`
as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`.

.. seealso::

:ref:`session_expire` - introductory material

:meth:`.Session.expire`

:meth:`.Session.expire_all`

"""
try:
state = attributes.instance_state(instance)
@ -1247,7 +1180,7 @@ class Session(_SessionClassMethods):
only_load_props=attribute_names) is None:
raise sa_exc.InvalidRequestError(
"Could not refresh instance '%s'" %
instance_str(instance))
orm_util.instance_str(instance))

def expire_all(self):
"""Expires all persistent instances within this Session.
@ -1270,14 +1203,6 @@ class Session(_SessionClassMethods):
calling :meth:`Session.expire_all` should not be needed when
autocommit is ``False``, assuming the transaction is isolated.

.. seealso::

:ref:`session_expire` - introductory material

:meth:`.Session.expire`

:meth:`.Session.refresh`

"""
for state in self.identity_map.all_states():
state._expire(state.dict, self.identity_map._modified)
@ -1308,14 +1233,6 @@ class Session(_SessionClassMethods):
:param attribute_names: optional list of string attribute names
indicating a subset of attributes to be expired.

.. seealso::

:ref:`session_expire` - introductory material

:meth:`.Session.expire`

:meth:`.Session.refresh`

"""
try:
state = attributes.instance_state(instance)
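
``refresh()``, ``expire()`` and ``expire_all()`` keep the same semantics on both sides of this change; only the 0.9 ``.. seealso::`` blocks are dropped. A short sketch with the assumed ``User`` model::

    user = session.query(User).get(1)
    session.expire(user, ['name'])   # 'name' reloads on next access
    session.refresh(user)            # emits a SELECT immediately
    session.expire_all()             # every persistent instance reloads lazily
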
@ -1374,7 +1291,7 @@ class Session(_SessionClassMethods):
if state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Instance %s is not present in this Session" %
state_str(state))
orm_util.state_str(state))

cascaded = list(state.manager.mapper.cascade_iterator(
'expunge', state))
@ -1414,7 +1331,7 @@ class Session(_SessionClassMethods):
"expect these generated values. Ensure also that "
"this flush() is not occurring at an inappropriate "
"time, such as within a load() event."
% state_str(state)
% orm_util.state_str(state)
)

if state.key is None:
@ -1517,7 +1434,7 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
state_str(state))
orm_util.state_str(state))

if state in self._deleted:
return
@ -1681,7 +1598,7 @@ class Session(_SessionClassMethods):
"merging to update the most recent version."
% (
existing_version,
state_str(merged_state),
orm_util.state_str(merged_state),
merged_version
))

@ -1705,13 +1622,13 @@ class Session(_SessionClassMethods):
if not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persistent within this Session" %
state_str(state))
orm_util.state_str(state))

def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - it can't be registered "
"as pending" % state_str(state))
"as pending" % orm_util.state_str(state))

self._before_attach(state)
if state not in self._new:
@ -1727,13 +1644,13 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
state_str(state))
orm_util.state_str(state))

if state.deleted:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. Use the make_transient() "
"function to send this object back to the transient state." %
state_str(state)
orm_util.state_str(state)
)
self._before_attach(state)
self._deleted.pop(state, None)
@ -1827,14 +1744,14 @@ class Session(_SessionClassMethods):
raise sa_exc.InvalidRequestError("Can't attach instance "
"%s; another instance with key %s is already "
"present in this session."
% (state_str(state), state.key))
% (orm_util.state_str(state), state.key))

if state.session_id and \
state.session_id is not self.hash_key and \
state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
"(this is '%s')" % (state_str(state),
"(this is '%s')" % (orm_util.state_str(state),
state.session_id, self.hash_key))

if state.session_id != self.hash_key:
@ -1865,7 +1782,7 @@ class Session(_SessionClassMethods):
Session.

"""
return iter(list(self._new.values()) + list(self.identity_map.values()))
return iter(list(self._new.values()) + self.identity_map.values())

def _contains_state(self, state):
return state in self._new or self.identity_map.contains_state(state)
@ -2229,13 +2146,13 @@ class Session(_SessionClassMethods):
def deleted(self):
"The set of all instances marked as 'deleted' within this ``Session``"

return util.IdentitySet(list(self._deleted.values()))
return util.IdentitySet(self._deleted.values())

@property
def new(self):
"The set of all instances marked as 'new' within this ``Session``."

return util.IdentitySet(list(self._new.values()))
return util.IdentitySet(self._new.values())


class sessionmaker(_SessionClassMethods):
@ -2286,8 +2203,7 @@ class sessionmaker(_SessionClassMethods):

def __init__(self, bind=None, class_=Session, autoflush=True,
autocommit=False,
expire_on_commit=True,
info=None, **kw):
expire_on_commit=True, **kw):
"""Construct a new :class:`.sessionmaker`.

All arguments here except for ``class_`` correspond to arguments
@ -2304,13 +2220,6 @@ class sessionmaker(_SessionClassMethods):
:class:`.Session` objects.
:param expire_on_commit=True: the expire_on_commit setting to use
with newly created :class:`.Session` objects.
:param info: optional dictionary of information that will be available
via :attr:`.Session.info`. Note this dictionary is *updated*, not
replaced, when the ``info`` parameter is specified to the specific
:class:`.Session` construction operation.

.. versionadded:: 0.9.0

:param \**kw: all other keyword arguments are passed to the constructor
of newly created :class:`.Session` objects.

@ -2319,8 +2228,6 @@ class sessionmaker(_SessionClassMethods):
kw['autoflush'] = autoflush
kw['autocommit'] = autocommit
kw['expire_on_commit'] = expire_on_commit
if info is not None:
kw['info'] = info
self.kw = kw
# make our own subclass of the given class, so that
# events can be associated with it specifically.
@ -2338,12 +2245,7 @@ class sessionmaker(_SessionClassMethods):

"""
for k, v in self.kw.items():
if k == 'info' and 'info' in local_kw:
d = v.copy()
d.update(local_kw['info'])
local_kw['info'] = d
else:
local_kw.setdefault(k, v)
local_kw.setdefault(k, v)
return self.class_(**local_kw)

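The ``info`` parameter being removed here is 0.9-only. Under 0.9 it behaved as below, with the per-``Session`` dictionary updating, not replacing, the factory-level one (a sketch reusing the engine from the earlier example; the keys are arbitrary)::

    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker(bind=users_engine, info={'tenant': 'default'})
    session = Session(info={'request_id': 42})
    # under 0.9: session.info == {'tenant': 'default', 'request_id': 42}
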
def configure(self, **new_kw):
@ -2358,12 +2260,14 @@ class sessionmaker(_SessionClassMethods):
self.kw.update(new_kw)

def __repr__(self):
return "%s(class_=%r,%s)" % (
return "%s(class_=%r%s)" % (
self.__class__.__name__,
self.class_.__name__,
", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
)

_sessions = weakref.WeakValueDictionary()


def make_transient(instance):
"""Make the given instance 'transient'.
@ -2393,41 +2297,6 @@ def make_transient(instance):
if state.deleted:
del state.deleted

def make_transient_to_detached(instance):
"""Make the given transient instance 'detached'.

All attribute history on the given instance
will be reset as though the instance were freshly loaded
from a query. Missing attributes will be marked as expired.
The primary key attributes of the object, which are required, will be made
into the "key" of the instance.

The object can then be added to a session, or merged
possibly with the load=False flag, at which point it will look
as if it were loaded that way, without emitting SQL.

This is a special use case function that differs from a normal
call to :meth:`.Session.merge` in that a given persistent state
can be manufactured without any SQL calls.

.. versionadded:: 0.9.5

.. seealso::

:func:`.make_transient`

"""
state = attributes.instance_state(instance)
if state.session_id or state.key:
raise sa_exc.InvalidRequestError(
"Given object must be transient")
state.key = state.mapper._identity_key_from_state(state)
if state.deleted:
del state.deleted
state._commit_all(state.dict)
state._expire_attributes(state.dict, state.unloaded)
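
``make_transient_to_detached()`` is removed entirely by the downgrade, since it first appeared in 0.9.5. For reference, under 0.9 it manufactured a detached instance without emitting SQL (a sketch, reusing the assumed ``User`` model)::

    from sqlalchemy.orm.session import make_transient_to_detached

    user = User(id=5, name='ed')      # transient; primary key set by hand
    make_transient_to_detached(user)  # detached; identity key derived from id
    session.add(user)                 # treated as already persisted - no INSERT
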



def object_session(instance):
"""Return the ``Session`` to which instance belongs.
@ -2442,4 +2311,12 @@ def object_session(instance):
raise exc.UnmappedInstanceError(instance)


def _state_session(state):
if state.session_id:
try:
return _sessions[state.session_id]
except KeyError:
pass
return None

_new_sessionid = util.counter()

@ -13,37 +13,19 @@ defines a large part of the ORM's interactivity.

import weakref
from .. import util
from . import exc as orm_exc, interfaces
from .path_registry import PathRegistry
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
from . import base
from . import exc as orm_exc, attributes, util as orm_util, interfaces
from .attributes import (
PASSIVE_NO_RESULT,
SQL_OK, NEVER_SET, ATTR_WAS_SET, NO_VALUE,\
PASSIVE_NO_INITIALIZE
)
sessionlib = util.importlater("sqlalchemy.orm", "session")
instrumentation = util.importlater("sqlalchemy.orm", "instrumentation")
mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")


class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level.

The :class:`.InstanceState` is a key object used by the
SQLAlchemy ORM in order to track the state of an object;
it is created the moment an object is instantiated, typically
as a result of :term:`instrumentation` which SQLAlchemy applies
to the ``__init__()`` method of the class.

:class:`.InstanceState` is also a semi-public object,
available for runtime inspection as to the state of a
mapped instance, including information such as its current
status within a particular :class:`.Session` and details
about data on individual attributes. The public API
in order to acquire a :class:`.InstanceState` object
is to use the :func:`.inspect` system::

>>> from sqlalchemy import inspect
>>> insp = inspect(some_mapped_object)

.. seealso::

:ref:`core_inspection_toplevel`

"""
"""tracks state information at the instance level."""

session_id = None
key = None
@ -73,9 +55,6 @@ class InstanceState(interfaces._InspectionAttr):
and history.

The returned object is an instance of :class:`.AttributeState`.
This object allows inspection of the current data
within an attribute as well as attribute history
since the last flush.

"""
return util.ImmutableProperties(
@ -87,64 +66,38 @@ class InstanceState(interfaces._InspectionAttr):

@property
def transient(self):
"""Return true if the object is :term:`transient`.

.. seealso::

:ref:`session_object_states`

"""
"""Return true if the object is transient."""
return self.key is None and \
not self._attached

@property
def pending(self):
"""Return true if the object is :term:`pending`.


.. seealso::

:ref:`session_object_states`

"""
"""Return true if the object is pending."""
return self.key is None and \
self._attached

@property
def persistent(self):
"""Return true if the object is :term:`persistent`.

.. seealso::

:ref:`session_object_states`

"""
"""Return true if the object is persistent."""
return self.key is not None and \
self._attached

@property
def detached(self):
"""Return true if the object is :term:`detached`.

.. seealso::

:ref:`session_object_states`

"""
"""Return true if the object is detached."""
return self.key is not None and \
not self._attached
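
The four properties above track the object lifecycle; the richer 0.9 docstrings being trimmed all describe the same checks. Walking the states with the inspection API (a sketch using the assumed ``User`` model)::

    from sqlalchemy import inspect

    user = User(id=7)
    insp = inspect(user)
    assert insp.transient        # no identity key, not attached
    session.add(user)
    assert insp.pending          # attached, key not yet assigned
    session.flush()
    assert insp.persistent       # attached with an identity key
    session.expunge(user)
    assert insp.detached         # has a key, no longer attached
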

@property
@util.dependencies("sqlalchemy.orm.session")
def _attached(self, sessionlib):
def _attached(self):
return self.session_id is not None and \
self.session_id in sessionlib._sessions

@property
@util.dependencies("sqlalchemy.orm.session")
def session(self, sessionlib):
def session(self):
"""Return the owning :class:`.Session` for this instance,
or ``None`` if none available."""

return sessionlib._state_session(self)

@property
@ -231,20 +184,9 @@ class InstanceState(interfaces._InspectionAttr):

@property
def dict(self):
"""Return the instance dict used by the object.

Under normal circumstances, this is always synonymous
with the ``__dict__`` attribute of the mapped object,
unless an alternative instrumentation system has been
configured.

In the case that the actual object has been garbage
collected, this accessor returns a blank dictionary.

"""
o = self.obj()
if o is not None:
return base.instance_dict(o)
return attributes.instance_dict(o)
else:
return {}

@ -272,8 +214,8 @@ class InstanceState(interfaces._InspectionAttr):
return self._pending_mutations[key]

def __getstate__(self):
state_dict = {'instance': self.obj()}
state_dict.update(
d = {'instance': self.obj()}
d.update(
(k, self.__dict__[k]) for k in (
'committed_state', '_pending_mutations', 'modified', 'expired',
'callables', 'key', 'parents', 'load_options',
@ -281,14 +223,14 @@ class InstanceState(interfaces._InspectionAttr):
) if k in self.__dict__
)
if self.load_path:
state_dict['load_path'] = self.load_path.serialize()
d['load_path'] = self.load_path.serialize()

state_dict['manager'] = self.manager._serialize(self, state_dict)
self.manager.dispatch.pickle(self, d)

return state_dict
return d

def __setstate__(self, state_dict):
inst = state_dict['instance']
def __setstate__(self, state):
inst = state['instance']
if inst is not None:
self.obj = weakref.ref(inst, self._cleanup)
self.class_ = inst.__class__
@ -297,26 +239,42 @@ class InstanceState(interfaces._InspectionAttr):
# due to storage of state in "parents". "class_"
# also new.
self.obj = None
self.class_ = state_dict['class_']
self.class_ = state['class_']
self.manager = manager = instrumentation.manager_of_class(self.class_)
if manager is None:
raise orm_exc.UnmappedInstanceError(
inst,
"Cannot deserialize object of type %r - "
"no mapper() has "
"been configured for this class within the current "
"Python process!" %
self.class_)
elif manager.is_mapped and not manager.mapper.configured:
mapperlib.configure_mappers()

self.committed_state = state_dict.get('committed_state', {})
self._pending_mutations = state_dict.get('_pending_mutations', {})
self.parents = state_dict.get('parents', {})
self.modified = state_dict.get('modified', False)
self.expired = state_dict.get('expired', False)
self.callables = state_dict.get('callables', {})
self.committed_state = state.get('committed_state', {})
self._pending_mutations = state.get('_pending_mutations', {})
self.parents = state.get('parents', {})
self.modified = state.get('modified', False)
self.expired = state.get('expired', False)
self.callables = state.get('callables', {})

self.__dict__.update([
(k, state_dict[k]) for k in (
(k, state[k]) for k in (
'key', 'load_options',
) if k in state_dict
) if k in state
])

if 'load_path' in state_dict:
self.load_path = PathRegistry.\
deserialize(state_dict['load_path'])
if 'load_path' in state:
self.load_path = orm_util.PathRegistry.\
deserialize(state['load_path'])

state_dict['manager'](self, inst, state_dict)
# setup _sa_instance_state ahead of time so that
# unpickle events can access the object normally.
# see [ticket:2362]
if inst is not None:
manager.setup_instance(inst, self)
manager.dispatch.unpickle(self, state)
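
Both forms of ``__getstate__``/``__setstate__`` serve the same purpose: letting mapped instances survive pickling, with 0.9 routing the work through ``manager._serialize`` and 0.8 through the pickle/unpickle dispatch events. A sketch::

    import pickle

    user = session.query(User).get(1)
    session.expunge(user)              # detach before pickling
    data = pickle.dumps(user)
    restored = pickle.loads(data)      # __setstate__ rebuilds the InstanceState
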

def _initialize(self, key):
"""Set this attribute to an empty value or collection,
@ -454,13 +412,6 @@ class InstanceState(interfaces._InspectionAttr):
difference(self.committed_state).\
difference(self.dict)

@property
def _unloaded_non_object(self):
return self.unloaded.intersection(
attr for attr in self.manager
if self.manager[attr].impl.accepts_scalar_loader
)

@property
def expired_attributes(self):
"""Return the set of keys which are 'expired' to be loaded by
@ -477,8 +428,6 @@ class InstanceState(interfaces._InspectionAttr):
return None

def _modified_event(self, dict_, attr, previous, collection=False, force=False):
if not attr.send_modified_events:
return
if attr.key not in self.committed_state or force:
if collection:
if previous is NEVER_SET:
@ -512,7 +461,7 @@ class InstanceState(interfaces._InspectionAttr):
"collected."
% (
self.manager[attr.key],
base.state_class_str(self)
orm_util.state_class_str(self)
))
self.modified = True

@ -619,40 +568,10 @@ class AttributeState(object):
"""Return the current pre-flush change history for
this attribute, via the :class:`.History` interface.

This method will **not** emit loader callables if the value of the
attribute is unloaded.

.. seealso::

:meth:`.AttributeState.load_history` - retrieve history
using loader callables if the value is not locally present.

:func:`.attributes.get_history` - underlying function

"""
return self.state.get_history(self.key,
PASSIVE_NO_INITIALIZE)

def load_history(self):
"""Return the current pre-flush change history for
this attribute, via the :class:`.History` interface.

This method **will** emit loader callables if the value of the
attribute is unloaded.

.. seealso::

:attr:`.AttributeState.history`

:func:`.attributes.get_history` - underlying function

.. versionadded:: 0.9.0

"""
return self.state.get_history(self.key,
PASSIVE_OFF ^ INIT_OK)
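
``load_history()`` is the 0.9 addition being dropped; ``history`` itself exists on both sides and never triggers loader callables. Inspecting pending changes on the assumed ``User`` model::

    from sqlalchemy import inspect

    user = session.query(User).get(1)
    user.name = 'new name'
    hist = inspect(user).attrs.name.history
    # hist.added contains ['new name']; hist.deleted holds the old value if loaded
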



class PendingCollection(object):
"""A writable placeholder for an unloaded collection.

@ -10,20 +10,20 @@
from .. import exc as sa_exc, inspect
from .. import util, log, event
from ..sql import util as sql_util, visitors
from .. import sql
from . import (
attributes, interfaces, exc as orm_exc, loading,
unitofwork, util as orm_util
)
from .state import InstanceState
from .util import _none_set
from . import properties
from .interfaces import (
LoaderStrategy, StrategizedProperty
LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
StrategizedProperty
)
from .session import _state_session
import itertools


def _register_attribute(strategy, mapper, useobject,
compare_function=None,
typecallable=None,
@ -45,10 +45,10 @@ def _register_attribute(strategy, mapper, useobject,
listen_hooks.append(single_parent_validator)

if prop.key in prop.parent.validators:
fn, opts = prop.parent.validators[prop.key]
fn, include_removes = prop.parent.validators[prop.key]
listen_hooks.append(
lambda desc, prop: orm_util._validator_events(desc,
prop.key, fn, **opts)
prop.key, fn, include_removes)
)

if useobject:
@ -81,7 +81,6 @@ def _register_attribute(strategy, mapper, useobject,
callable_=callable_,
active_history=active_history,
impl_class=impl_class,
send_modified_events=not useobject or not prop.viewonly,
doc=prop.doc,
**kw
)
@ -89,7 +88,7 @@ def _register_attribute(strategy, mapper, useobject,
for hook in listen_hooks:
hook(desc, prop)

@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)

class UninstrumentedColumnLoader(LoaderStrategy):
"""Represent a non-instrumented MapperProperty.

@ -101,19 +100,17 @@ class UninstrumentedColumnLoader(LoaderStrategy):
super(UninstrumentedColumnLoader, self).__init__(parent)
self.columns = self.parent_property.columns

def setup_query(self, context, entity, path, loadopt, adapter,
def setup_query(self, context, entity, path, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)

def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def create_row_processor(self, context, path, mapper, row, adapter):
return None, None, None


@log.class_logger
@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
class ColumnLoader(LoaderStrategy):
"""Provide loading behavior for a :class:`.ColumnProperty`."""

@ -122,7 +119,7 @@ class ColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')

def setup_query(self, context, entity, path, loadopt,
def setup_query(self, context, entity, path,
adapter, column_collection, **kwargs):
for c in self.columns:
if adapter:
@ -134,8 +131,7 @@ class ColumnLoader(LoaderStrategy):
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
active_history = self.parent_property.active_history or \
self.columns[0].primary_key or \
mapper.version_id_col in set(self.columns)
self.columns[0].primary_key

_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@ -143,7 +139,7 @@ class ColumnLoader(LoaderStrategy):
)

def create_row_processor(self, context, path,
loadopt, mapper, row, adapter):
mapper, row, adapter):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
@ -160,9 +156,9 @@ class ColumnLoader(LoaderStrategy):
return expire_for_non_present_col, None, None


log.class_logger(ColumnLoader)


@log.class_logger
@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
class DeferredColumnLoader(LoaderStrategy):
"""Provide loading behavior for a deferred :class:`.ColumnProperty`."""

@ -174,16 +170,16 @@ class DeferredColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.group = self.parent_property.group

def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def create_row_processor(self, context, path, mapper, row, adapter):
col = self.columns[0]
if adapter:
col = adapter.columns[col]

key = self.key
if col in row:
return self.parent_property._get_strategy_by_cls(ColumnLoader).\
return self.parent_property._get_strategy(ColumnLoader).\
create_row_processor(
context, path, loadopt, mapper, row, adapter)
context, path, mapper, row, adapter)

elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
@ -206,15 +202,15 @@ class DeferredColumnLoader(LoaderStrategy):
expire_missing=False
)

def setup_query(self, context, entity, path, loadopt, adapter,
def setup_query(self, context, entity, path, adapter,
only_load_props=None, **kwargs):
if (
loadopt and self.group and
loadopt.local_opts.get('undefer_group', False) == self.group
self.group is not None and
context.attributes.get(('undefer', self.group), False)
) or (only_load_props and self.key in only_load_props):
self.parent_property._get_strategy_by_cls(ColumnLoader).\
self.parent_property._get_strategy(ColumnLoader).\
setup_query(context, entity,
path, loadopt, adapter, **kwargs)
path, adapter, **kwargs)

def _load_for_state(self, state, passive):
if not state.key:
@ -255,6 +251,8 @@ class DeferredColumnLoader(LoaderStrategy):
return attributes.ATTR_WAS_SET


log.class_logger(DeferredColumnLoader)


class LoadDeferredColumns(object):
"""serializable loader object used by DeferredColumnLoader"""
@ -271,6 +269,29 @@ class LoadDeferredColumns(object):
return strategy._load_for_state(state, passive)


class DeferredOption(StrategizedOption):
propagate_to_loaders = True

def __init__(self, key, defer=False):
super(DeferredOption, self).__init__(key)
self.defer = defer

def get_strategy_class(self):
if self.defer:
return DeferredColumnLoader
else:
return ColumnLoader


class UndeferGroupOption(MapperOption):
propagate_to_loaders = True

def __init__(self, group):
self.group = group

def process_query(self, query):
query._attributes[("undefer", self.group)] = True
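
``DeferredOption`` and ``UndeferGroupOption`` are the 0.8 query-option plumbing being reinstated; user code reaches them through ``defer()`` and ``undefer_group()`` on either version. A sketch (the ``'photos'`` group name is an assumed example)::

    from sqlalchemy.orm import defer, undefer_group

    session.query(User).options(defer('name')).all()
    session.query(User).options(undefer_group('photos')).all()
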


class AbstractRelationshipLoader(LoaderStrategy):
"""LoaderStrategies which deal with related objects."""
@ -283,9 +304,6 @@ class AbstractRelationshipLoader(LoaderStrategy):



@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="noload")
@properties.RelationshipProperty.strategy_for(lazy=None)
class NoLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=None".
@ -301,16 +319,15 @@ class NoLoader(AbstractRelationshipLoader):
typecallable=self.parent_property.collection_class,
)

def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def create_row_processor(self, context, path, mapper, row, adapter):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
return invoke_no_load, None, None


log.class_logger(NoLoader)


@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy=True)
@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=True", that is loads when first accessed.
@ -333,6 +350,7 @@ class LazyLoader(AbstractRelationshipLoader):

# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
#from sqlalchemy.orm import query
self.use_get = not self.uselist and \
self.mapper._get_clause[0].compare(
self._lazywhere,
@ -341,7 +359,7 @@ class LazyLoader(AbstractRelationshipLoader):
)

if self.use_get:
for col in list(self._equated_columns):
for col in self._equated_columns.keys():
if col in self.mapper._equivalent_columns:
for c in self.mapper._equivalent_columns[col]:
self._equated_columns[c] = self._equated_columns[col]
@ -524,22 +542,17 @@ class LazyLoader(AbstractRelationshipLoader):
for pk in self.mapper.primary_key
]

@util.dependencies("sqlalchemy.orm.strategy_options")
def _emit_lazyload(self, strategy_options, session, state, ident_key, passive):
def _emit_lazyload(self, session, state, ident_key, passive):
q = session.query(self.mapper)._adapt_all_clauses()

if self.parent_property.secondary is not None:
q = q.select_from(self.mapper, self.parent_property.secondary)

q = q._with_invoke_all_eagers(False)

pending = not state.key

# don't autoflush on pending
if pending or passive & attributes.NO_AUTOFLUSH:
if pending:
q = q.autoflush(False)


if state.load_path:
q = q._with_current_path(state.load_path[self.parent_property])

@ -558,7 +571,7 @@ class LazyLoader(AbstractRelationshipLoader):
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, LazyLoader):
q = q.options(strategy_options.Load(rev.parent).lazyload(rev.key))
q = q.options(EagerLazyOption((rev.key,), lazy='select'))

lazy_clause = self.lazy_clause(state, passive=passive)

@ -569,7 +582,6 @@ class LazyLoader(AbstractRelationshipLoader):

q = q.filter(lazy_clause)


result = q.all()
if self.uselist:
return result
@ -586,7 +598,7 @@ class LazyLoader(AbstractRelationshipLoader):
else:
return None

def create_row_processor(self, context, path, loadopt,
def create_row_processor(self, context, path,
mapper, row, adapter):
key = self.key
if not self.is_class_level:
@ -618,6 +630,8 @@ class LazyLoader(AbstractRelationshipLoader):
return reset_for_lazy_callable, None, None


log.class_logger(LazyLoader)


class LoadLazyAttribute(object):
"""serializable loader object used by LazyLoader"""
@ -634,19 +648,18 @@ class LoadLazyAttribute(object):
return strategy._load_for_state(state, passive)


@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy_by_cls(LazyLoader).\
_get_strategy(LazyLoader).\
init_class_attribute(mapper)

def setup_query(self, context, entity,
path, loadopt, adapter, column_collection=None,
path, adapter, column_collection=None,
parentmapper=None, **kwargs):
pass

def create_row_processor(self, context, path, loadopt,
def create_row_processor(self, context, path,
mapper, row, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
@ -654,8 +667,6 @@ class ImmediateLoader(AbstractRelationshipLoader):
return None, None, load_immediate


@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
def __init__(self, parent):
super(SubqueryLoader, self).__init__(parent)
@ -663,11 +674,11 @@ class SubqueryLoader(AbstractRelationshipLoader):

def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy_by_cls(LazyLoader).\
_get_strategy(LazyLoader).\
init_class_attribute(mapper)

def setup_query(self, context, entity,
path, loadopt, adapter,
path, adapter,
column_collection=None,
parentmapper=None, **kwargs):

@ -678,8 +689,7 @@ class SubqueryLoader(AbstractRelationshipLoader):

# build up a path indicating the path from the leftmost
# entity to the thing we're subquery loading.
with_poly_info = path.get(context.attributes,
"path_with_polymorphic", None)
with_poly_info = path.get(context, "path_with_polymorphic", None)
if with_poly_info is not None:
effective_entity = with_poly_info.entity
else:
@ -692,7 +702,7 @@ class SubqueryLoader(AbstractRelationshipLoader):

# if not via query option, check for
# a cycle
if not path.contains(context.attributes, "loader"):
if not path.contains(context, "loaderstrategy"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@ -739,7 +749,7 @@ class SubqueryLoader(AbstractRelationshipLoader):

# add new query to attributes to be picked up
# by create_row_processor
path.set(context.attributes, "subquery", q)
path.set(context, "subquery", q)

def _get_leftmost(self, subq_path):
subq_path = subq_path.path
@ -950,7 +960,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
if self._data is None:
self._load()

def create_row_processor(self, context, path, loadopt,
def create_row_processor(self, context, path,
mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
@ -960,7 +970,7 @@ class SubqueryLoader(AbstractRelationshipLoader):

path = path[self.parent_property]

subq = path.get(context.attributes, 'subquery')
subq = path.get(context, 'subquery')

if subq is None:
return None, None, None
@ -970,10 +980,10 @@ class SubqueryLoader(AbstractRelationshipLoader):
# cache the loaded collections in the context
# so that inheriting mappers don't re-load when they
# call upon create_row_processor again
collections = path.get(context.attributes, "collections")
collections = path.get(context, "collections")
if collections is None:
collections = self._SubqCollections(subq)
path.set(context.attributes, 'collections', collections)
path.set(context, 'collections', collections)

if adapter:
local_cols = [adapter.columns[c] for c in local_cols]
@ -1013,10 +1023,9 @@ class SubqueryLoader(AbstractRelationshipLoader):
return load_scalar_from_subq, None, None, collections.loader


log.class_logger(SubqueryLoader)


@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="joined")
@properties.RelationshipProperty.strategy_for(lazy=False)
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
@ -1028,12 +1037,13 @@ class JoinedLoader(AbstractRelationshipLoader):

def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy_by_cls(LazyLoader).init_class_attribute(mapper)
_get_strategy(LazyLoader).init_class_attribute(mapper)

def setup_query(self, context, entity, path, loadopt, adapter, \
def setup_query(self, context, entity, path, adapter, \
column_collection=None, parentmapper=None,
allow_innerjoin=True,
**kwargs):
"""Add a left outer join to the statement that's being constructed."""
"""Add a left outer join to the statement thats being constructed."""

if not context.query._enable_eagerloads:
return
@ -1042,32 +1052,33 @@ class JoinedLoader(AbstractRelationshipLoader):

with_polymorphic = None

user_defined_adapter = self._init_user_defined_eager_proc(
loadopt, context) if loadopt else False

user_defined_adapter = path.get(context,
"user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
self._setup_query_on_user_defined_adapter(
self._get_user_defined_adapter(
context, entity, path, adapter,
user_defined_adapter
)
else:
# if not via query option, check for
# a cycle
if not path.contains(context.attributes, "loader"):
if not path.contains(context, "loaderstrategy"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
elif path.contains_mapper(self.mapper):
return

clauses, adapter, add_to_collection = self._generate_row_adapter(
context, entity, path, loadopt, adapter,
column_collection, parentmapper
clauses, adapter, add_to_collection, \
allow_innerjoin = self._generate_row_adapter(
context, entity, path, adapter,
column_collection, parentmapper, allow_innerjoin
)

with_poly_info = path.get(
context.attributes,
context,
"path_with_polymorphic",
None
)
@ -1086,88 +1097,31 @@ class JoinedLoader(AbstractRelationshipLoader):
path,
clauses,
parentmapper=self.mapper,
column_collection=add_to_collection)
column_collection=add_to_collection,
allow_innerjoin=allow_innerjoin)

if with_poly_info is not None and \
None in set(context.secondary_columns):
raise sa_exc.InvalidRequestError(
"Detected unaliased columns when generating joined "
"load. Make sure to use aliased=True or flat=True "
"when using joined loading with with_polymorphic()."
)

def _init_user_defined_eager_proc(self, loadopt, context):

# check if the opt applies at all
if "eager_from_alias" not in loadopt.local_opts:
# nope
return False

path = loadopt.path.parent

# the option applies. check if the "user_defined_eager_row_processor"
# has been built up.
adapter = path.get(context.attributes,
"user_defined_eager_row_processor", False)
if adapter is not False:
# just return it
return adapter

# otherwise figure it out.
alias = loadopt.local_opts["eager_from_alias"]

root_mapper, prop = path[-2:]

#from .mapper import Mapper
#from .interfaces import MapperProperty
#assert isinstance(root_mapper, Mapper)
#assert isinstance(prop, MapperProperty)

if alias is not None:
if isinstance(alias, str):
alias = prop.target.alias(alias)
adapter = sql_util.ColumnAdapter(alias,
equivalents=prop.mapper._equivalent_columns)
else:
if path.contains(context.attributes, "path_with_polymorphic"):
with_poly_info = path.get(context.attributes,
"path_with_polymorphic")
adapter = orm_util.ORMAdapter(
with_poly_info.entity,
equivalents=prop.mapper._equivalent_columns)
else:
adapter = context.query._polymorphic_adapters.get(prop.mapper, None)
path.set(context.attributes,
"user_defined_eager_row_processor",
adapter)

return adapter

def _setup_query_on_user_defined_adapter(self, context, entity,
def _get_user_defined_adapter(self, context, entity,
path, adapter, user_defined_adapter):

# apply some more wrapping to the "user defined adapter"
# if we are setting up the query for SQL render.
adapter = entity._get_entity_clauses(context.query, context)
adapter = entity._get_entity_clauses(context.query, context)
if adapter and user_defined_adapter:
user_defined_adapter = user_defined_adapter.wrap(adapter)
path.set(context, "user_defined_eager_row_processor",
user_defined_adapter)
elif adapter:
user_defined_adapter = adapter
path.set(context, "user_defined_eager_row_processor",
user_defined_adapter)

if adapter and user_defined_adapter:
user_defined_adapter = user_defined_adapter.wrap(adapter)
path.set(context.attributes, "user_defined_eager_row_processor",
user_defined_adapter)
elif adapter:
user_defined_adapter = adapter
path.set(context.attributes, "user_defined_eager_row_processor",
user_defined_adapter)

add_to_collection = context.primary_columns
return user_defined_adapter, adapter, add_to_collection
add_to_collection = context.primary_columns
return user_defined_adapter, adapter, add_to_collection

def _generate_row_adapter(self,
context, entity, path, loadopt, adapter,
column_collection, parentmapper
context, entity, path, adapter,
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
context.attributes,
context,
"path_with_polymorphic",
None
)
@ -1175,7 +1129,6 @@ class JoinedLoader(AbstractRelationshipLoader):
to_adapt = with_poly_info.entity
else:
to_adapt = orm_util.AliasedClass(self.mapper,
flat=True,
use_mapper_path=True)
clauses = orm_util.ORMAdapter(
to_adapt,
@ -1186,12 +1139,13 @@ class JoinedLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True

innerjoin = (
loadopt.local_opts.get(
'innerjoin', self.parent_property.innerjoin)
if loadopt is not None
else self.parent_property.innerjoin
)
innerjoin = allow_innerjoin and path.get(context,
"eager_join_type",
self.parent_property.innerjoin)
if not innerjoin:
# if this is an outer join, all eager joins from
# here must also be outer joins
allow_innerjoin = False

context.create_eager_joins.append(
(self._create_eager_join, context,
@ -1200,9 +1154,9 @@ class JoinedLoader(AbstractRelationshipLoader):
)

add_to_collection = context.secondary_columns
path.set(context.attributes, "eager_row_processor", clauses)
path.set(context, "eager_row_processor", clauses)

return clauses, adapter, add_to_collection
return clauses, adapter, add_to_collection, allow_innerjoin

def _create_eager_join(self, context, entity,
path, adapter, parentmapper,
@ -1258,34 +1212,13 @@ class JoinedLoader(AbstractRelationshipLoader):
onclause = self.parent_property

assert clauses.aliased_class is not None

join_to_outer = innerjoin and isinstance(towrap, sql.Join) and towrap.isouter

if join_to_outer and innerjoin == 'nested':
inner = orm_util.join(
towrap.right,
clauses.aliased_class,
onclause,
isouter=False
)

eagerjoin = orm_util.join(
towrap.left,
inner,
towrap.onclause,
isouter=True
)
eagerjoin._target_adapter = inner._target_adapter
else:
if join_to_outer:
innerjoin = False
eagerjoin = orm_util.join(
towrap,
clauses.aliased_class,
onclause,
isouter=not innerjoin
)
context.eager_joins[entity_key] = eagerjoin
context.eager_joins[entity_key] = eagerjoin = \
orm_util.join(
towrap,
clauses.aliased_class,
onclause,
isouter=not innerjoin
)

# send a hint to the Query as to where it may "splice" this join
eagerjoin.stop_on = entity.selectable
@ -1299,7 +1232,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# by the Query propagates those columns outward.
# This has the effect
# of "undefering" those columns.
for col in sql_util._find_columns(
for col in sql_util.find_columns(
self.parent_property.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
if adapter:
@ -1315,10 +1248,10 @@ class JoinedLoader(AbstractRelationshipLoader):
)
)

def _create_eager_adapter(self, context, row, adapter, path, loadopt):
user_defined_adapter = self._init_user_defined_eager_proc(
loadopt, context) if loadopt else False

def _create_eager_adapter(self, context, row, adapter, path):
user_defined_adapter = path.get(context,
"user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
decorator = user_defined_adapter
# user defined eagerloads are part of the "primary"
@ -1329,7 +1262,7 @@ class JoinedLoader(AbstractRelationshipLoader):
elif context.adapter:
decorator = context.adapter
else:
decorator = path.get(context.attributes, "eager_row_processor")
decorator = path.get(context, "eager_row_processor")
if decorator is None:
return False

@ -1337,11 +1270,11 @@ class JoinedLoader(AbstractRelationshipLoader):
self.mapper.identity_key_from_row(row, decorator)
return decorator
except KeyError:
# no identity key - don't return a row
# no identity key - dont return a row
# processor, will cause a degrade to lazy
return False

def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def create_row_processor(self, context, path, mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@ -1353,7 +1286,7 @@ class JoinedLoader(AbstractRelationshipLoader):
eager_adapter = self._create_eager_adapter(
context,
row,
adapter, our_path, loadopt)
adapter, our_path)

if eager_adapter is not False:
key = self.key
@ -1370,9 +1303,9 @@ class JoinedLoader(AbstractRelationshipLoader):
return self._create_collection_loader(context, key, _instance)
else:
return self.parent_property.\
_get_strategy_by_cls(LazyLoader).\
_get_strategy(LazyLoader).\
create_row_processor(
context, path, loadopt,
context, path,
mapper, row, adapter)

def _create_collection_loader(self, context, key, _instance):
@ -1433,6 +1366,100 @@ class JoinedLoader(AbstractRelationshipLoader):
None, load_scalar_from_joined_exec


log.class_logger(JoinedLoader)


class EagerLazyOption(StrategizedOption):
def __init__(self, key, lazy=True, chained=False,
propagate_to_loaders=True
):
if isinstance(key[0], basestring) and key[0] == '*':
if len(key) != 1:
raise sa_exc.ArgumentError(
"Wildcard identifier '*' must "
"be specified alone.")
key = ("relationship:*",)
propagate_to_loaders = False
super(EagerLazyOption, self).__init__(key)
self.lazy = lazy
self.chained = chained
self.propagate_to_loaders = propagate_to_loaders
self.strategy_cls = factory(lazy)

def get_strategy_class(self):
return self.strategy_cls

_factory = {
False: JoinedLoader,
"joined": JoinedLoader,
None: NoLoader,
"noload": NoLoader,
"select": LazyLoader,
True: LazyLoader,
"subquery": SubqueryLoader,
"immediate": ImmediateLoader
}


def factory(identifier):
return _factory.get(identifier, LazyLoader)
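
The ``_factory`` dictionary restores the 0.8 dispatch from ``relationship(lazy=...)`` values to loader strategy classes, replacing 0.9's decorator-based ``strategy_for()`` registration; the accepted values are unchanged. A sketch with assumed ``Parent``/``Child`` models::

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import relationship

    class Parent(Base):
        __tablename__ = 'parents'
        id = Column(Integer, primary_key=True)
        # lazy='select' -> LazyLoader (the default); the other values map as
        # in the table above: 'joined', 'subquery', 'noload', 'immediate'
        children = relationship('Child', lazy='select')

    class Child(Base):
        __tablename__ = 'children'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parents.id'))
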


class EagerJoinOption(PropertyOption):

def __init__(self, key, innerjoin, chained=False):
super(EagerJoinOption, self).__init__(key)
self.innerjoin = innerjoin
self.chained = chained

def process_query_property(self, query, paths):
if self.chained:
for path in paths:
path.set(query, "eager_join_type", self.innerjoin)
else:
paths[-1].set(query, "eager_join_type", self.innerjoin)


class LoadEagerFromAliasOption(PropertyOption):

def __init__(self, key, alias=None, chained=False):
super(LoadEagerFromAliasOption, self).__init__(key)
if alias is not None:
if not isinstance(alias, basestring):
info = inspect(alias)
alias = info.selectable
self.alias = alias
self.chained = chained

def process_query_property(self, query, paths):
if self.chained:
for path in paths[0:-1]:
(root_mapper, prop) = path.path[-2:]
adapter = query._polymorphic_adapters.get(prop.mapper, None)
path.setdefault(query,
"user_defined_eager_row_processor",
adapter)

root_mapper, prop = paths[-1].path[-2:]
if self.alias is not None:
if isinstance(self.alias, basestring):
self.alias = prop.target.alias(self.alias)
paths[-1].set(query, "user_defined_eager_row_processor",
sql_util.ColumnAdapter(self.alias,
equivalents=prop.mapper._equivalent_columns)
)
else:
if paths[-1].contains(query, "path_with_polymorphic"):
with_poly_info = paths[-1].get(query, "path_with_polymorphic")
adapter = orm_util.ORMAdapter(
with_poly_info.entity,
equivalents=prop.mapper._equivalent_columns,
adapt_required=True)
else:
adapter = query._polymorphic_adapters.get(prop.mapper, None)
paths[-1].set(query, "user_defined_eager_row_processor",
adapter)


def single_parent_validator(desc, prop):
def _do_check(state, value, oldvalue, initiator):
Some files were not shown because too many files have changed in this diff.