Coverage for /home/martinb/.local/share/virtualenvs/camcops/lib/python3.6/site-packages/sqlalchemy/sql/ddl.py : 39%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# sql/ddl.py
2# Copyright (C) 2009-2020 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: http://www.opensource.org/licenses/mit-license.php
7"""
8Provides the hierarchy of DDL-defining schema items as well as routines
9to invoke them for a create/drop call.
11"""
13from .base import _bind_or_error
14from .base import _generative
15from .base import Executable
16from .base import SchemaVisitor
17from .elements import ClauseElement
18from .. import event
19from .. import exc
20from .. import util
21from ..util import topological
class _DDLCompiles(ClauseElement):
    """Mixin marking a clause element as one handled by the DDL compiler."""

    def _compiler(self, dialect, **kw):
        """Return a DDL compiler for this element under the given Dialect."""
        compiler_factory = dialect.ddl_compiler
        return compiler_factory(dialect, self, **kw)
class DDLElement(Executable, _DDLCompiles):
    """Base class for DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    :class:`.DDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # DDL statements autocommit by default when executed outside an
    # explicit transaction.
    _execution_options = Executable._execution_options.union(
        {"autocommit": True}
    )

    # schema item this DDL is rendered "against"; set by against()
    target = None
    # legacy conditional criteria (string/tuple/callable); see _check_ddl_on()
    on = None
    # dialect-name filter installed by execute_if()
    dialect = None
    # callable predicate installed by execute_if()
    callable_ = None

    def _execute_on_connection(self, connection, multiparams, params):
        # Hook invoked by Connection.execute() to route DDL constructs
        # through the DDL execution path.
        return connection._execute_ddl(self, multiparams, params)

    def execute(self, bind=None, target=None):
        """Execute this DDL immediately.

        Executes the DDL statement in isolation using the supplied
        :class:`.Connectable` or
        :class:`.Connectable` assigned to the ``.bind``
        property, if not supplied.  If the DDL has a conditional ``on``
        criteria, it will be invoked with None as the event.

        :param bind:
          Optional, an ``Engine`` or ``Connection``.  If not supplied, a valid
          :class:`.Connectable` must be present in the
          ``.bind`` property.

        :param target:
          Optional, defaults to None.  The target SchemaItem for the
          execute call.  Will be passed to the ``on`` callable if any,
          and may also provide string expansion data for the
          statement. See ``execute_at`` for more information.

        """

        if bind is None:
            bind = _bind_or_error(self)

        if self._should_execute(target, bind):
            return bind.execute(self.against(target))
        else:
            bind.engine.logger.info("DDL execution skipped, criteria not met.")

    @util.deprecated(
        "0.7",
        "The :meth:`.DDLElement.execute_at` method is deprecated and will "
        "be removed in a future release. Please use the :class:`.DDLEvents` "
        "listener interface in conjunction with the "
        ":meth:`.DDLElement.execute_if` method.",
    )
    def execute_at(self, event_name, target):
        """Link execution of this DDL to the DDL lifecycle of a SchemaItem.

        Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
        executing it when that schema item is created or dropped.  The DDL
        statement will be executed using the same Connection and transactional
        context as the Table create/drop itself.  The ``.bind`` property of
        this statement is ignored.

        :param event:
          One of the events defined in the schema item's ``.ddl_events``;
          e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'

        :param target:
          The Table or MetaData instance for which this DDLElement will
          be associated with.

        A DDLElement instance can be linked to any number of schema items.

        ``execute_at`` builds on the ``append_ddl_listener`` interface of
        :class:`_schema.MetaData` and :class:`_schema.Table` objects.

        Caveat: Creating or dropping a Table in isolation will also trigger
        any DDL set to ``execute_at`` that Table's MetaData.  This may change
        in a future release.

        """

        def call_event(target, connection, **kw):
            # adapt the modern event signature to the deprecated
            # 'on'-criteria check before executing
            if self._should_execute_deprecated(
                event_name, target, connection, **kw
            ):
                return connection.execute(self.against(target))

        # translate the legacy dashed event name ('before-create') to the
        # underscored form used by the event system
        event.listen(target, "" + event_name.replace("-", "_"), call_event)

    @_generative
    def against(self, target):
        """Return a copy of this DDL against a specific schema item."""

        # note: @_generative means this mutates a copy, not self
        self.target = target

    @_generative
    def execute_if(self, dialect=None, callable_=None, state=None):
        r"""Return a callable that will execute this
        DDLElement conditionally.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                'before_create',
                DDL("my_ddl").execute_if(dialect='postgresql')
            )

        :param dialect: May be a string, tuple or a callable
          predicate.  If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable\_: A callable, which will be invoked with
          four positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a true value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        self.dialect = dialect
        self.callable_ = callable_
        self.state = state

    def _should_execute(self, target, bind, **kw):
        # legacy 'on' criteria, if present, can veto execution first
        if self.on is not None and not self._should_execute_deprecated(
            None, target, bind, **kw
        ):
            return False

        # dialect filter: exact name match for a string, membership for
        # a collection of names
        if isinstance(self.dialect, util.string_types):
            if self.dialect != bind.engine.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if bind.engine.name not in self.dialect:
                return False
        # user predicate installed via execute_if() can also veto
        if self.callable_ is not None and not self.callable_(
            self, target, bind, state=self.state, **kw
        ):
            return False

        return True

    def _should_execute_deprecated(self, event, target, bind, **kw):
        # evaluate the deprecated 'on' criteria: string -> dialect name
        # equality, collection -> membership, callable -> user predicate
        if self.on is None:
            return True
        elif isinstance(self.on, util.string_types):
            return self.on == bind.engine.name
        elif isinstance(self.on, (tuple, list, set)):
            return bind.engine.name in self.on
        else:
            return self.on(self, event, target, bind, **kw)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        if self._should_execute(target, bind, **kw):
            return bind.execute(self.against(target))

    def _check_ddl_on(self, on):
        # validate the deprecated 'on' argument: dialect name, collection
        # of names, or callable predicate
        if on is not None and (
            not isinstance(on, util.string_types + (tuple, list, set))
            and not util.callable(on)
        ):
            raise exc.ArgumentError(
                "Expected the name of a database dialect, a tuple "
                "of names, or a callable for "
                "'on' criteria, got type '%s'." % type(on).__name__
            )

    def bind(self):
        if self._bind:
            return self._bind

    def _set_bind(self, bind):
        self._bind = bind

    # expose ._bind as the public read/write 'bind' attribute
    bind = property(bind, _set_bind)

    def _generate(self):
        # shallow-copy support for the @_generative decorator: returns a
        # new instance sharing copied state
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
class DDL(DDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    function as DDL event listeners, and can be subscribed to those events
    listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or
    :class:`_schema.MetaData` objects as targets.
    Basic templating support allows
    a single DDL instance to handle repetitive tasks for multiple tables.

    Examples::

      from sqlalchemy import event, DDL

      tbl = Table('users', metadata, Column('uid', Integer))
      event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

      spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
      event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

      drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
      connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available::

      %(table)s  - the Table name, with any required quoting applied
      %(schema)s - the schema name, with any required quoting applied
      %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, will be combined with the standard
    substitutions noted above.  Keys present in the context will override
    the standard substitutions.

    """

    __visit_name__ = "ddl"

    @util.deprecated_params(
        on=(
            "0.7",
            "The :paramref:`.DDL.on` parameter is deprecated and will be "
            "removed in a future release. Please refer to "
            ":meth:`.DDLElement.execute_if`.",
        )
    )
    def __init__(self, statement, on=None, context=None, bind=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed.  Statements will be
          processed with Python's string formatting operator.  See the
          ``context`` argument and the ``execute_at`` method.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param on:
          Optional filtering criteria.  May be a string, tuple or a callable
          predicate.  If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something', on='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something', on=('postgresql', 'mysql'))

          If a callable, it will be invoked with four positional arguments
          as well as optional keyword arguments:

            :ddl:
              This DDL element.

            :event:
              The name of the event that has triggered this DDL, such as
              'after-create' Will be None if the DDL is executed explicitly.

            :target:
              The ``Table`` or ``MetaData`` object which is the target of
              this event. May be None if the DDL is executed explicitly.

            :connection:
              The ``Connection`` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

          If the callable returns a true value, the DDL statement will be
          executed.

        :param context:
          Optional dictionary, defaults to None.  These values will be
          available for use in string substitutions on the DDL statement.

        :param bind:
          Optional. A :class:`.Connectable`, used by
          default when ``execute()`` is invoked without a bind argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """

        # only plain (unicode) strings are accepted; no ClauseElements
        if not isinstance(statement, util.string_types):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        # template substitution values; never None so compilation can
        # merge it unconditionally
        self.context = context or {}

        self._check_ddl_on(on)
        self.on = on
        self._bind = bind

    def __repr__(self):
        # render the statement plus any non-empty 'on'/'context' attributes
        return "<%s@%s; %s>" % (
            type(self).__name__,
            id(self),
            ", ".join(
                [repr(self.statement)]
                + [
                    "%s=%r" % (key, getattr(self, key))
                    for key in ("on", "context")
                    if getattr(self, key)
                ]
            ),
        )
class _CreateDropBase(DDLElement):
    """Base class for DDL constructs that represent CREATE and DROP or
    equivalents.

    The common theme of _CreateDropBase is a single
    ``element`` attribute which refers to the element
    to be created or dropped.

    """

    def __init__(self, element, on=None, bind=None):
        # element: the schema item (Table, Sequence, Index, constraint,
        # or schema name) that is the subject of the CREATE/DROP
        self.element = element
        # validate the deprecated 'on' criteria before storing it
        self._check_ddl_on(on)
        self.on = on
        self.bind = bind

    def _create_rule_disable(self, compiler):
        """Allow disable of _create_rule using a callable.

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False
class CreateSchema(_CreateDropBase):
    """Represent a CREATE SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "create_schema"

    def __init__(self, name, quote=None, **kw):
        """Create a new :class:`.CreateSchema` construct.

        :param name: string name of the schema to create.
        :param quote: optional quoting preference for the schema name.
        :param \\**kw: passed through to :class:`._CreateDropBase`
         (``on``, ``bind``).
        """

        self.quote = quote
        super(CreateSchema, self).__init__(name, **kw)
class DropSchema(_CreateDropBase):
    """Represent a DROP SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "drop_schema"

    def __init__(self, name, quote=None, cascade=False, **kw):
        """Create a new :class:`.DropSchema` construct.

        :param name: string name of the schema to drop.
        :param quote: optional quoting preference for the schema name.
        :param cascade: if True, render a cascading drop (dialect
         dependent).
        :param \\**kw: passed through to :class:`._CreateDropBase`
         (``on``, ``bind``).
        """

        self.quote = quote
        self.cascade = cascade
        super(DropSchema, self).__init__(name, **kw)
class CreateTable(_CreateDropBase):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    def __init__(
        self, element, on=None, bind=None, include_foreign_key_constraints=None
    ):
        """Create a :class:`.CreateTable` construct.

        :param element: a :class:`_schema.Table` that's the subject
         of the CREATE
        :param on: See the description for 'on' in :class:`.DDL`.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects that will be included
         inline within the CREATE construct; if omitted, all foreign key
         constraints that do not specify use_alter=True are included.

         .. versionadded:: 1.0.0

        """
        super(CreateTable, self).__init__(element, on=on, bind=bind)
        # snapshot the table's columns as CreateColumn constructs so that
        # per-column compilation can be customized
        self.columns = [CreateColumn(column) for column in element.columns]
        self.include_foreign_key_constraints = include_foreign_key_constraints
class _DropView(_CreateDropBase):
    """Semi-public 'DROP VIEW' construct.

    Used by the test suite for dialect-agnostic drops of views.
    This object will eventually be part of a public "view" API.

    """

    __visit_name__ = "drop_view"
class CreateColumn(_DDLCompiles):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles

        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type)
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const)
                    for const in column.constraints)
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table('mytable', metadata,
                Column('x', Integer, info={"special":True}, primary_key=True),
                Column('y', String(50)),
                Column('z', String(20), info={"special":True})
            )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme::

        CREATE TABLE mytable (
                x SPECIAL DIRECTIVE INTEGER NOT NULL,
                y VARCHAR(50),
                z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn

        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == 'xmin':
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('xmin', Integer)
                )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    __visit_name__ = "create_column"

    def __init__(self, element):
        # element: the Column object this construct wraps for compilation
        self.element = element
class DropTable(_CreateDropBase):
    """Represent a DROP TABLE statement."""

    # dispatch key consumed by the DDL compiler's visit_drop_table()
    __visit_name__ = "drop_table"
class CreateSequence(_CreateDropBase):
    """Represent a CREATE SEQUENCE statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "create_sequence"
class DropSequence(_CreateDropBase):
    """Represent a DROP SEQUENCE statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_sequence"
class CreateIndex(_CreateDropBase):
    """Represent a CREATE INDEX statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "create_index"
class DropIndex(_CreateDropBase):
    """Represent a DROP INDEX statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_index"
class AddConstraint(_CreateDropBase):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(self, element, *args, **kw):
        super(AddConstraint, self).__init__(element, *args, **kw)
        # Once an ADD CONSTRAINT construct exists for this constraint,
        # suppress its inline rendering inside CREATE TABLE; the
        # portable_instancemethod wrapper keeps the rule picklable.
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
class DropConstraint(_CreateDropBase):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(self, element, cascade=False, **kw):
        # cascade: if True, render a cascading drop (dialect dependent)
        self.cascade = cascade
        super(DropConstraint, self).__init__(element, **kw)
        # a constraint being explicitly dropped should not also render
        # inline within CREATE TABLE; see _create_rule_disable
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
class SetTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE IS statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "set_table_comment"
class DropTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE '' statement.

    Note this varies a lot across database backends.

    """

    __visit_name__ = "drop_table_comment"
class SetColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "set_column_comment"
class DropColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS NULL statement."""

    # dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_column_comment"
class DDLBase(SchemaVisitor):
    """Common base for schema visitors that emit DDL over a connection."""

    def __init__(self, connection):
        # the Connection over which generated DDL will be executed
        self.connection = connection
class SchemaGenerator(DDLBase):
    """Schema visitor which emits CREATE statements for metadata, tables,
    sequences and indexes, driving ``MetaData.create_all()`` /
    ``Table.create()``."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        # checkfirst: when True, skip objects the dialect reports as
        # already present
        self.checkfirst = checkfirst
        # optional subset of tables to restrict a metadata-level create
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # traversal memoization used by the SchemaVisitor machinery
        self.memo = {}

    def _can_create_table(self, table):
        """Return True if *table* should be created (identifier valid,
        and either checkfirst is off or the table is absent)."""
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_sequence(self, sequence):
        """Return True if *sequence* should be created: the dialect must
        support sequences, the sequence must not be optional on a
        sequence-optional dialect, and (with checkfirst) must be absent."""
        effective_schema = self.connection.schema_for_object(sequence)

        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # dependency-sorted (table, inline-fk-constraints) pairs; a None
        # table entry carries constraints that must be added via ALTER
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # standalone sequences only; column-attached sequences are created
        # along with their table
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]
        metadata.dispatch.before_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for seq in seq_coll:
            self.traverse_single(seq, create_ok=True)

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table,
                    create_ok=True,
                    include_foreign_key_constraints=fkcs,
                    _is_metadata_operation=True,
                )
            else:
                # constraints deferred out of the CREATE TABLE phase
                # (e.g. cycle participants) are emitted via ALTER here
                for fkc in fkcs:
                    self.traverse_single(fkc)

        metadata.dispatch.after_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        if not create_ok and not self._can_create_table(table):
            return

        table.dispatch.before_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        # client-side column defaults may wrap Sequence objects which
        # themselves require CREATE before the table is usable
        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        if not self.dialect.supports_alter:
            # e.g., don't omit any foreign key constraints
            include_foreign_key_constraints = None

        self.connection.execute(
            # fmt: off
            CreateTable(
                table,
                include_foreign_key_constraints=  # noqa
                    include_foreign_key_constraints,  # noqa
            )
            # fmt: on
        )

        if hasattr(table, "indexes"):
            for index in table.indexes:
                self.traverse_single(index)

        # comments are emitted as separate statements only when the
        # dialect cannot render them inline within CREATE TABLE
        if self.dialect.supports_comments and not self.dialect.inline_comments:
            if table.comment is not None:
                self.connection.execute(SetTableComment(table))

            for column in table.columns:
                if column.comment is not None:
                    self.connection.execute(SetColumnComment(column))

        table.dispatch.after_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        # standalone FK constraints require ALTER TABLE support
        if not self.dialect.supports_alter:
            return
        self.connection.execute(AddConstraint(constraint))

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        self.connection.execute(CreateSequence(sequence))

    def visit_index(self, index):
        self.connection.execute(CreateIndex(index))
class SchemaDropper(DDLBase):
    """Schema visitor which emits DROP statements for metadata, tables,
    sequences and indexes, driving ``MetaData.drop_all()`` /
    ``Table.drop()``."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        # checkfirst: when True, only drop objects the dialect reports
        # as present
        self.checkfirst = checkfirst
        # optional subset of tables to restrict a metadata-level drop
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # traversal memoization used by the SchemaVisitor machinery
        self.memo = {}

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            # reverse of creation order; unnamed constraints or a
            # non-ALTER backend force constraints to remain inline
            # (filter_fn returning False) rather than dropped separately
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: False
                        if not self.dialect.supports_alter
                        or constraint.name is None
                        else None,
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # best effort: warn and drop in arbitrary order, since
                # ALTER-based constraint removal is not available
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s, and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                # re-raise with a DROP-specific message, chaining the
                # original cycle information
                util.raise_(
                    exc.CircularDependencyError(
                        err2.args[0],
                        err2.cycles,
                        err2.edges,
                        msg="Can't sort tables for DROP; an "
                        "unresolvable foreign key "
                        "dependency exists between tables: %s. Please ensure "
                        "that the ForeignKey and ForeignKeyConstraint objects "
                        "involved in the cycle have "
                        "names so that they can be dropped using "
                        "DROP CONSTRAINT."
                        % (
                            ", ".join(
                                sorted([t.fullname for t in err2.cycles])
                            )
                        ),
                    ),
                    from_=err2,
                )

        # standalone sequences only; column-attached sequences are
        # handled with their owning table
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        metadata.dispatch.before_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table, drop_ok=True, _is_metadata_operation=True
                )
            else:
                # constraints separated out of the table drop order are
                # removed first via ALTER ... DROP CONSTRAINT
                for fkc in fkcs:
                    self.traverse_single(fkc)

        for seq in seq_coll:
            self.traverse_single(seq, drop_ok=True)

        metadata.dispatch.after_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def _can_drop_table(self, table):
        """Return True if *table* should be dropped (identifier valid,
        and either checkfirst is off or the table exists)."""
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_drop_sequence(self, sequence):
        """Return True if *sequence* should be dropped; mirrors
        SchemaGenerator._can_create_sequence with an existence (rather
        than absence) check under checkfirst."""
        effective_schema = self.connection.schema_for_object(sequence)
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_index(self, index):
        self.connection.execute(DropIndex(index))

    def visit_table(self, table, drop_ok=False, _is_metadata_operation=False):
        if not drop_ok and not self._can_drop_table(table):
            return

        table.dispatch.before_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        self.connection.execute(DropTable(table))

        # traverse client side defaults which may refer to server-side
        # sequences. noting that some of these client side defaults may also be
        # set up as server side defaults (see http://docs.sqlalchemy.org/en/
        # latest/core/defaults.html#associating-a-sequence-as-the-server-side-
        # default), so have to be dropped after the table is dropped.
        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        table.dispatch.after_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        # dropping a standalone FK constraint requires ALTER support
        if not self.dialect.supports_alter:
            return
        self.connection.execute(DropConstraint(constraint))

    def visit_sequence(self, sequence, drop_ok=False):

        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        self.connection.execute(DropSequence(sequence))
def sort_tables(
    tables, skip_fn=None, extra_dependencies=None,
):
    """Sort a collection of :class:`_schema.Table` objects in dependency
    order.

    Tables that are depended upon — via
    :class:`_schema.ForeignKeyConstraint` objects or explicit dependencies
    added with :meth:`_schema.Table.add_is_dependent_on` — are emitted
    before the tables that depend on them.

    .. warning::

        This function cannot by itself resolve dependency cycles between
        tables, which are usually caused by mutually dependent foreign
        key constraints.  When a cycle is detected, the foreign keys of
        the involved tables are omitted from consideration in the sort
        and a warning is emitted (this will become an exception in a
        future release); tables not part of the cycle are still returned
        in dependency order.

        To resolve a cycle, apply the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter to
        the constraints creating it, or use
        :func:`_schema.sort_tables_and_constraints`, which returns the
        cyclic constraints in a separate collection so they can be
        applied to the schema separately.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint`
     object.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort

    """

    if skip_fn is None:
        constraint_filter = None
    else:

        def constraint_filter(fkc):
            # Adapt the per-ForeignKey predicate to the per-constraint
            # signature of sort_tables_and_constraints: True means
            # "skip this constraint", None means "no opinion".
            return True if any(skip_fn(fk) for fk in fkc.elements) else None

    return [
        table
        for (table, _fkcs) in sort_tables_and_constraints(
            tables,
            filter_fn=constraint_filter,
            extra_dependencies=extra_dependencies,
            _warn_for_cycles=True,
        )
        if table is not None
    ]
def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint` objects.

    The sort is dependency-ordered: tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` are emitted such that each
    :class:`_schema.Table` follows the tables it depends upon.  Any
    :class:`_schema.ForeignKeyConstraint` objects that cannot be
    satisfied inline by the sort (``use_alter`` constraints, those the
    ``filter_fn`` forces out, or those dropped in order to break a
    dependency cycle) are emitted afterwards in a final
    ``(None, [ForeignKeyConstraint ...])`` entry.

    Dependencies between tables arise from the presence of
    :class:`_schema.ForeignKeyConstraint` objects, from explicit
    dependencies added by :meth:`_schema.Table.add_is_dependent_on`, and
    from the :paramref:`~.sort_tables_and_constraints.filter_fn` and
    :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters stated here.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object, and returns a value
     based on whether this constraint should definitely be included or
     excluded as an inline constraint, or neither.  If it returns False,
     the constraint will definitely be included as a dependency that
     cannot be subject to ALTER; if True, it will **only** be included
     as an ALTER result at the end.  Returning None means the constraint
     is included in the table-based result unless it is detected as part
     of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. versionadded:: 1.0.0

    .. seealso::

        :func:`.sort_tables`


    """
    fixed_deps = set()
    mutable_deps = set()

    if extra_dependencies is not None:
        fixed_deps.update(extra_dependencies)

    # constraints emitted separately in the trailing (None, [...]) entry
    # rather than inline with their owning table
    alter_fkcs = set()

    for tbl in tables:
        for constraint in tbl.foreign_key_constraints:
            # explicit use_alter constraints are never considered inline
            if constraint.use_alter is True:
                alter_fkcs.add(constraint)
                continue

            # filter_fn returning True also forces the ALTER collection;
            # the callable is invoked exactly once per constraint here
            if filter_fn and filter_fn(constraint) is True:
                alter_fkcs.add(constraint)
                continue

            referred = constraint.referred_table
            if referred is not tbl:
                # FK-derived edges are "mutable": they may be discarded
                # below if they turn out to participate in a cycle
                mutable_deps.add((referred, tbl))

        fixed_deps.update(
            (parent, tbl) for parent in tbl._extra_dependencies
        )

    def _toposort():
        # deterministic_order keeps output stable across interpreter runs
        return list(
            topological.sort(
                fixed_deps.union(mutable_deps),
                tables,
                deterministic_order=True,
            )
        )

    try:
        ordering = _toposort()
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints. Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        for edge in err.edges:
            if edge not in mutable_deps:
                continue
            cyclic_table = edge[1]
            if cyclic_table not in err.cycles:
                continue
            # move every FK constraint of the cyclic table that the caller
            # did not explicitly pin inline (filter_fn -> False) into the
            # ALTER collection, and drop its edges from the graph
            droppable = [
                c
                for c in cyclic_table.foreign_key_constraints
                if filter_fn is None or filter_fn(c) is not False
            ]
            alter_fkcs.update(droppable)
            for c in droppable:
                referred = c.referred_table
                if referred is not cyclic_table:
                    mutable_deps.discard((referred, cyclic_table))
        # retry the sort now that the cycle-causing edges are removed
        ordering = _toposort()

    return [
        (tbl, tbl.foreign_key_constraints.difference(alter_fkcs))
        for tbl in ordering
    ] + [(None, list(alter_fkcs))]