from collections import namedtuple
import re

from sqlalchemy import schema
from sqlalchemy import text

from . import base
from .. import util
from ..util import sqla_compat
from ..util.compat import string_types
from ..util.compat import text_type
from ..util.compat import with_metaclass


class ImplMeta(type):
    def __init__(cls, classname, bases, dict_):
        newtype = type.__init__(cls, classname, bases, dict_)
        if "__dialect__" in dict_:
            _impls[dict_["__dialect__"]] = cls
        return newtype


_impls = {}
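
# Illustrative sketch (not part of the module): a concrete dialect impl
# registers itself in ``_impls`` simply by defining ``__dialect__`` in its
# class body; ImplMeta stores the class under that key at class-creation
# time, and DefaultImpl.get_by_dialect() later looks it up by
# ``dialect.name``.  The class name below is hypothetical.
#
#     class SomeDialectImpl(DefaultImpl):
#         __dialect__ = "somedialect"
#
#     assert _impls["somedialect"] is SomeDialectImpl
#     # given a SQLAlchemy dialect whose .name is "somedialect",
#     # DefaultImpl.get_by_dialect(dialect) would return SomeDialectImpl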

Params = namedtuple("Params", ["token0", "tokens", "args", "kwargs"])


class DefaultImpl(with_metaclass(ImplMeta)):

    """Provide the entrypoint for major migration operations,
    including database-specific behavioral variances.

    While individual SQL/DDL constructs already provide
    for database-specific implementations, variances here
    allow for entirely different sequences of operations
    to take place for a particular migration, such as
    SQL Server's special 'IDENTITY INSERT' step for
    bulk inserts.

    """

    __dialect__ = "default"

    transactional_ddl = False
    command_terminator = ";"
    type_synonyms = ({"NUMERIC", "DECIMAL"},)
    type_arg_extract = ()

    def __init__(
        self,
        dialect,
        connection,
        as_sql,
        transactional_ddl,
        output_buffer,
        context_opts,
    ):
        self.dialect = dialect
        self.connection = connection
        self.as_sql = as_sql
        self.literal_binds = context_opts.get("literal_binds", False)

        self.output_buffer = output_buffer
        self.memo = {}
        self.context_opts = context_opts
        if transactional_ddl is not None:
            self.transactional_ddl = transactional_ddl

        if self.literal_binds:
            if not self.as_sql:
                raise util.CommandError(
                    "Can't use literal_binds setting without as_sql mode"
                )
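
    # Hedged sketch (assumed usage; this object is normally constructed by
    # the migration context rather than by hand): an "offline" impl writing
    # SQL to a buffer might be built roughly like
    #
    #     import io
    #     buf = io.StringIO()
    #     impl = DefaultImpl(
    #         dialect, connection=None, as_sql=True,
    #         transactional_ddl=None, output_buffer=buf,
    #         context_opts={"literal_binds": True},
    #     )
    #
    # where ``dialect`` is a SQLAlchemy dialect object.  Note that
    # literal_binds is only accepted together with as_sql; otherwise
    # util.CommandError is raised above.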

    @classmethod
    def get_by_dialect(cls, dialect):
        return _impls[dialect.name]

    def static_output(self, text):
        self.output_buffer.write(text_type(text + "\n\n"))
        self.output_buffer.flush()

    def requires_recreate_in_batch(self, batch_op):
        """Return True if the given :class:`.BatchOperationsImpl`
        would need the table to be recreated and copied in order to
        proceed.

        Normally, only returns True on SQLite when operations other
        than add_column are present.

        """
        return False

    def prep_table_for_batch(self, table):
        """Perform any operations needed on a table before a new
        one is created to replace it in batch mode.

        The PG dialect uses this to drop constraints on the table
        before the new one uses those same names.

        """

    @property
    def bind(self):
        return self.connection

    def _exec(
        self,
        construct,
        execution_options=None,
        multiparams=(),
        params=util.immutabledict(),
    ):
        if isinstance(construct, string_types):
            construct = text(construct)
        if self.as_sql:
            if multiparams or params:
                # TODO: coverage
                raise Exception("Execution arguments not allowed with as_sql")

            if self.literal_binds and not isinstance(
                construct, schema.DDLElement
            ):
                compile_kw = dict(compile_kwargs={"literal_binds": True})
            else:
                compile_kw = {}

            self.static_output(
                text_type(
                    construct.compile(dialect=self.dialect, **compile_kw)
                )
                .replace("\t", " ")
                .strip()
                + self.command_terminator
            )
        else:
            conn = self.connection
            if execution_options:
                conn = conn.execution_options(**execution_options)
            return conn.execute(construct, *multiparams, **params)
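
    # Hedged sketch of the two _exec() modes (names below are illustrative,
    # not part of the module).  With as_sql=True the compiled statement is
    # written to the output buffer followed by the command terminator,
    # roughly:
    #
    #     impl._exec(schema.DropTable(some_table))
    #     # offline mode: writes approximately "DROP TABLE some_table;"
    #     # to output_buffer
    #
    # In online mode the same call executes the construct on
    # self.connection (optionally with execution_options applied) and
    # returns the result.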

    def execute(self, sql, execution_options=None):
        self._exec(sql, execution_options)

    def alter_column(
        self,
        table_name,
        column_name,
        nullable=None,
        server_default=False,
        name=None,
        type_=None,
        schema=None,
        autoincrement=None,
        comment=False,
        existing_comment=None,
        existing_type=None,
        existing_server_default=None,
        existing_nullable=None,
        existing_autoincrement=None,
    ):
        if autoincrement is not None or existing_autoincrement is not None:
            util.warn(
                "autoincrement and existing_autoincrement "
                "only make sense for MySQL",
                stacklevel=3,
            )
        if nullable is not None:
            self._exec(
                base.ColumnNullable(
                    table_name,
                    column_name,
                    nullable,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )
        if server_default is not False:
            self._exec(
                base.ColumnDefault(
                    table_name,
                    column_name,
                    server_default,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )
        if type_ is not None:
            self._exec(
                base.ColumnType(
                    table_name,
                    column_name,
                    type_,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )

        if comment is not False:
            self._exec(
                base.ColumnComment(
                    table_name,
                    column_name,
                    comment,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )

        # do the new name last ;)
        if name is not None:
            self._exec(
                base.ColumnName(
                    table_name,
                    column_name,
                    name,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                )
            )
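
    # Hedged illustration (not part of the module): a single alter_column()
    # call is decomposed into independent DDL phases, one _exec() per aspect
    # that was actually passed.  For example, a call roughly like
    #
    #     impl.alter_column(
    #         "account", "name",
    #         nullable=False,
    #         type_=sa.String(100),        # "sa" = sqlalchemy, illustrative
    #         existing_type=sa.String(50),
    #     )
    #
    # would emit a ColumnNullable construct followed by a ColumnType
    # construct, while server_default / comment / name are skipped because
    # they keep their sentinel defaults (False / False / None).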

    def add_column(self, table_name, column, schema=None):
        self._exec(base.AddColumn(table_name, column, schema=schema))

    def drop_column(self, table_name, column, schema=None, **kw):
        self._exec(base.DropColumn(table_name, column, schema=schema))

    def add_constraint(self, const):
        if const._create_rule is None or const._create_rule(self):
            self._exec(schema.AddConstraint(const))

    def drop_constraint(self, const):
        self._exec(schema.DropConstraint(const))

    def rename_table(self, old_table_name, new_table_name, schema=None):
        self._exec(
            base.RenameTable(old_table_name, new_table_name, schema=schema)
        )

    def create_table(self, table):
        table.dispatch.before_create(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )
        self._exec(schema.CreateTable(table))
        table.dispatch.after_create(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )
        for index in table.indexes:
            self._exec(schema.CreateIndex(index))

        with_comment = (
            sqla_compat._dialect_supports_comments(self.dialect)
            and not self.dialect.inline_comments
        )
        comment = sqla_compat._comment_attribute(table)
        if comment and with_comment:
            self.create_table_comment(table)

        for column in table.columns:
            comment = sqla_compat._comment_attribute(column)
            if comment and with_comment:
                self.create_column_comment(column)

    def drop_table(self, table):
        self._exec(schema.DropTable(table))

    def create_index(self, index):
        self._exec(schema.CreateIndex(index))

    def create_table_comment(self, table):
        self._exec(schema.SetTableComment(table))

    def drop_table_comment(self, table):
        self._exec(schema.DropTableComment(table))

    def create_column_comment(self, column):
        self._exec(schema.SetColumnComment(column))

    def drop_index(self, index):
        self._exec(schema.DropIndex(index))

    def bulk_insert(self, table, rows, multiinsert=True):
        if not isinstance(rows, list):
            raise TypeError("List expected")
        elif rows and not isinstance(rows[0], dict):
            raise TypeError("List of dictionaries expected")
        if self.as_sql:
            for row in rows:
                self._exec(
                    table.insert(inline=True).values(
                        **dict(
                            (
                                k,
                                sqla_compat._literal_bindparam(
                                    k, v, type_=table.c[k].type
                                )
                                if not isinstance(
                                    v, sqla_compat._literal_bindparam
                                )
                                else v,
                            )
                            for k, v in row.items()
                        )
                    )
                )
        else:
            # work around http://www.sqlalchemy.org/trac/ticket/2461
            if not hasattr(table, "_autoincrement_column"):
                table._autoincrement_column = None
            if rows:
                if multiinsert:
                    self._exec(table.insert(inline=True), multiparams=rows)
                else:
                    for row in rows:
                        self._exec(table.insert(inline=True).values(**row))
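
    # Hedged example of the expected bulk_insert() payload (table and values
    # are illustrative): rows must be a list of dicts keyed by column name.
    #
    #     impl.bulk_insert(
    #         account_table,
    #         [{"id": 1, "name": "alice"}, {"id": 2, "name": "bob"}],
    #     )
    #
    # In as_sql mode each row is rendered as its own INSERT with literal
    # values; otherwise a single executemany-style insert is issued when
    # multiinsert=True, or one insert per row when it is False.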

    def _tokenize_column_type(self, column):
        definition = self.dialect.type_compiler.process(column.type).lower()

        # tokenize the SQLAlchemy-generated version of a type, so that
        # the two can be compared.
        #
        # examples:
        # NUMERIC(10, 5)
        # TIMESTAMP WITH TIMEZONE
        # INTEGER UNSIGNED
        # INTEGER (10) UNSIGNED
        # INTEGER(10) UNSIGNED
        # varchar character set utf8
        #

        tokens = re.findall(r"[\w\-_]+|\(.+?\)", definition)

        term_tokens = []
        paren_term = None

        for token in tokens:
            if re.match(r"^\(.*\)$", token):
                paren_term = token
            else:
                term_tokens.append(token)

        params = Params(term_tokens[0], term_tokens[1:], [], {})

        if paren_term:
            for term in re.findall("[^(),]+", paren_term):
                if "=" in term:
                    key, val = term.split("=")
                    params.kwargs[key.strip()] = val.strip()
                else:
                    params.args.append(term.strip())

        return params
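
    # Hedged illustration of the tokenizer's output, assuming the dialect
    # renders the column type as "NUMERIC(10, 2)":
    #
    #     self._tokenize_column_type(column)
    #     # -> Params(token0="numeric", tokens=[], args=["10", "2"], kwargs={})
    #
    # A parenthesized term containing "=" (e.g. a hypothetical
    # "(charset=utf8)") would instead land in Params.kwargs.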

    def _column_types_match(self, inspector_params, metadata_params):
        if inspector_params.token0 == metadata_params.token0:
            return True

        synonyms = [{t.lower() for t in batch} for batch in self.type_synonyms]
        inspector_all_terms = " ".join(
            [inspector_params.token0] + inspector_params.tokens
        )
        metadata_all_terms = " ".join(
            [metadata_params.token0] + metadata_params.tokens
        )

        for batch in synonyms:
            if {inspector_all_terms, metadata_all_terms}.issubset(batch) or {
                inspector_params.token0,
                metadata_params.token0,
            }.issubset(batch):
                return True
        return False

    def _column_args_match(self, inspected_params, meta_params):
        """We want to compare column parameters. However, we only want
        to compare parameters that are set. If they both have `collation`,
        we want to make sure they are the same. However, if only one
        specifies it, don't flag it as being less specific.
        """

        if (
            len(meta_params.tokens) == len(inspected_params.tokens)
            and meta_params.tokens != inspected_params.tokens
        ):
            return False

        if (
            len(meta_params.args) == len(inspected_params.args)
            and meta_params.args != inspected_params.args
        ):
            return False

        insp = " ".join(inspected_params.tokens).lower()
        meta = " ".join(meta_params.tokens).lower()

        for reg in self.type_arg_extract:
            mi = re.search(reg, insp)
            mm = re.search(reg, meta)

            if mi and mm and mi.group(1) != mm.group(1):
                return False

        return True

    def compare_type(self, inspector_column, metadata_column):
        """Returns True if there ARE differences between the types of the
        two columns. Takes impl.type_synonyms into account between
        reflected and metadata types.
        """
        inspector_params = self._tokenize_column_type(inspector_column)
        metadata_params = self._tokenize_column_type(metadata_column)

        if not self._column_types_match(inspector_params, metadata_params):
            return True
        if not self._column_args_match(inspector_params, metadata_params):
            return True
        return False
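
    # Hedged examples of compare_type() outcomes under the class defaults
    # above (type_synonyms contains {"NUMERIC", "DECIMAL"}):
    #
    #     NUMERIC(10, 2) vs. DECIMAL(10, 2)  -> False (treated as the same)
    #     VARCHAR(30)    vs. VARCHAR(40)     -> True  (argument mismatch)
    #     INTEGER        vs. INTEGER         -> False
    #
    # Per the docstring, True means "a difference was detected".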

    def compare_server_default(
        self,
        inspector_column,
        metadata_column,
        rendered_metadata_default,
        rendered_inspector_default,
    ):
        return rendered_inspector_default != rendered_metadata_default

    def correct_for_autogen_constraints(
        self,
        conn_uniques,
        conn_indexes,
        metadata_unique_constraints,
        metadata_indexes,
    ):
        pass

    def render_ddl_sql_expr(self, expr, is_server_default=False, **kw):
        """Render a SQL expression that is typically a server default,
        index expression, etc.

        .. versionadded:: 1.0.11

        """

        compile_kw = dict(
            compile_kwargs={"literal_binds": True, "include_table": False}
        )
        return text_type(expr.compile(dialect=self.dialect, **compile_kw))
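
    # Hedged illustration (inputs and rendered output are approximate and
    # dialect-dependent): expressions are compiled with literal binds and
    # without a table prefix, e.g.
    #
    #     impl.render_ddl_sql_expr(sa.text("now()"))   # -> "now()"
    #     impl.render_ddl_sql_expr(sa.literal(0))      # -> roughly "0"
    #
    # where "sa" stands for sqlalchemy (illustrative import name).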

    def _compat_autogen_column_reflect(self, inspector):
        return self.autogen_column_reflect

    def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks):
        pass

    def autogen_column_reflect(self, inspector, table, column_info):
        """A hook that is attached to the 'column_reflect' event for when
        a Table is reflected from the database during the autogenerate
        process.

        Dialects can elect to modify the information gathered here.

        """

    def start_migrations(self):
        """A hook called when :meth:`.EnvironmentContext.run_migrations`
        is called.

        Implementations can set up per-migration-run state here.

        """

    def emit_begin(self):
        """Emit the string ``BEGIN``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("BEGIN" + self.command_terminator)

    def emit_commit(self):
        """Emit the string ``COMMIT``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("COMMIT" + self.command_terminator)
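
    # Hedged note: in offline (as_sql) mode these hooks simply write the
    # transaction delimiters to the buffer, e.g.
    #
    #     impl.emit_begin()    # writes "BEGIN;"
    #     # ... migration DDL is emitted here ...
    #     impl.emit_commit()   # writes "COMMIT;"
    #
    # Dialect impls can override these if the backend needs a different
    # statement, or none at all.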

    def render_type(self, type_obj, autogen_context):
        return False