ppdbSchema.py
# This file is part of dax_ppdb.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Module responsible for PPDB schema operations.
"""

__all__ = ["ColumnDef", "IndexDef", "TableDef",
           "make_minimal_dia_object_schema", "make_minimal_dia_source_schema",
           "PpdbSchema"]

from collections import namedtuple
import logging
import yaml

import sqlalchemy
from sqlalchemy import (Column, Index, MetaData, PrimaryKeyConstraint,
                        UniqueConstraint, Table)
from sqlalchemy.schema import CreateTable, CreateIndex
from sqlalchemy.ext.compiler import compiles
import lsst.afw.table as afwTable


_LOG = logging.getLogger(__name__.partition(".")[2])  # strip leading "lsst."

# Classes for representing schema

# Column description:
#    name : column name
#    type : name of cat type (INT, FLOAT, etc.)
#    nullable : True or False
#    default : default value for column, can be None
#    description : documentation, can be None or empty
#    unit : string with unit name, can be None
#    ucd : string with ucd, can be None
ColumnDef = namedtuple('ColumnDef', 'name type nullable default description unit ucd')

# Index description:
#    name : index name, can be None or empty
#    type : one of "PRIMARY", "UNIQUE", "INDEX"
#    columns : list of column names in index
IndexDef = namedtuple('IndexDef', 'name type columns')

# Table description:
#    name : table name
#    description : documentation, can be None or empty
#    columns : list of ColumnDef instances
#    indices : list of IndexDef instances, can be empty or None
TableDef = namedtuple('TableDef', 'name description columns indices')

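
def _example_table_def():
    # Illustrative sketch, not part of the original module: how one column,
    # one index, and one table could be described with the tuples above.
    # All names and values here are hypothetical.
    col = ColumnDef(name="diaObjectId", type="BIGINT", nullable=False,
                    default=None, description="Unique DiaObject identifier",
                    unit=None, ucd="meta.id")
    idx = IndexDef(name="PK_DiaObject", type="PRIMARY", columns=["diaObjectId"])
    return TableDef(name="DiaObject", description="DIA objects",
                    columns=[col], indices=[idx])
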

def make_minimal_dia_object_schema():
    """Define and create the minimal schema required for a DIAObject.

    Returns
    -------
    schema : `lsst.afw.table.Schema`
        Minimal schema for DIAObjects.
    """
    schema = afwTable.SourceTable.makeMinimalSchema()
    schema.addField("pixelId", type='L',
                    doc='Unique spherical pixelization identifier.')
    schema.addField("nDiaSources", type='L')
    return schema


def make_minimal_dia_source_schema():
    """Define and create the minimal schema required for a DIASource.

    Returns
    -------
    schema : `lsst.afw.table.Schema`
        Minimal schema for DIASources.
    """
    schema = afwTable.SourceTable.makeMinimalSchema()
    schema.addField("diaObjectId", type='L',
                    doc='Unique identifier of the DIAObject this source is '
                        'associated with.')
    schema.addField("ccdVisitId", type='L',
                    doc='Id of the exposure and ccd this object was detected '
                        'in.')
    schema.addField("psFlux", type='D',
                    doc='Calibrated PSF flux of this source.')
    schema.addField("psFluxErr", type='D',
                    doc='Calibrated PSF flux err of this source.')
    schema.addField("flags", type='L',
                    doc='Quality flags for this DIASource.')
    schema.addField("pixelId", type='L',
                    doc='Unique spherical pixelization identifier.')
    return schema

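
def _example_minimal_catalogs():
    # Illustrative sketch, not part of the original module: the minimal
    # schemas above can be used directly to construct afw source catalogs.
    objects = afwTable.SourceCatalog(make_minimal_dia_object_schema())
    sources = afwTable.SourceCatalog(make_minimal_dia_source_schema())
    return objects, sources
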

@compiles(CreateTable, "oracle")
def _add_suffixes_tbl(element, compiler, **kw):
    """Add all needed suffixes for Oracle CREATE TABLE statement.

    This is a special compilation method for the CreateTable clause which
    registers itself with SQLAlchemy using the @compiles decorator. The exact
    method name does not matter. Clients can pass a dict to the ``info``
    keyword argument of the Table constructor. If the dict has a key
    "oracle_tablespace" then its value is used as the tablespace name. If the
    dict has a key "oracle_iot" with a true value then an IOT table is
    created. This method generates additional clauses for the CREATE TABLE
    statement which specify the tablespace name and "ORGANIZATION INDEX" for
    IOT.

    .. seealso:: https://docs.sqlalchemy.org/en/latest/core/compiler.html
    """
    text = compiler.visit_create_table(element, **kw)
    _LOG.debug("text: %r", text)
    oracle_tablespace = element.element.info.get("oracle_tablespace")
    oracle_iot = element.element.info.get("oracle_iot", False)
    _LOG.debug("oracle_tablespace: %r", oracle_tablespace)
    if oracle_iot:
        text += " ORGANIZATION INDEX"
    if oracle_tablespace:
        text += " TABLESPACE " + oracle_tablespace
    _LOG.debug("text: %r", text)
    return text


@compiles(CreateIndex, "oracle")
def _add_suffixes_idx(element, compiler, **kw):
    """Add all needed suffixes for Oracle CREATE INDEX statement.

    This is a special compilation method for the CreateIndex clause which
    registers itself with SQLAlchemy using the @compiles decorator. The exact
    method name does not matter. Clients can pass a dict to the ``info``
    keyword argument of the Index constructor. If the dict has a key
    "oracle_tablespace" then its value is used as the tablespace name. This
    method generates an additional clause for the CREATE INDEX statement
    which specifies the tablespace name.

    .. seealso:: https://docs.sqlalchemy.org/en/latest/core/compiler.html
    """
    text = compiler.visit_create_index(element, **kw)
    _LOG.debug("text: %r", text)
    oracle_tablespace = element.element.info.get("oracle_tablespace")
    _LOG.debug("oracle_tablespace: %r", oracle_tablespace)
    if oracle_tablespace:
        text += " TABLESPACE " + oracle_tablespace
    _LOG.debug("text: %r", text)
    return text

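
def _example_oracle_ddl():
    # Illustrative sketch, not part of the original module: compiling DDL
    # against the Oracle dialect triggers the hooks above.  The table and
    # tablespace names here are hypothetical.
    from sqlalchemy.dialects import oracle
    tbl = Table("ExampleTable", MetaData(),
                Column("id", sqlalchemy.types.BigInteger, primary_key=True),
                info=dict(oracle_tablespace="PPDB_TS", oracle_iot=True))
    # Produces "CREATE TABLE ... ORGANIZATION INDEX TABLESPACE PPDB_TS"
    return str(CreateTable(tbl).compile(dialect=oracle.dialect()))
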

class PpdbSchema(object):
    """Class for management of PPDB schema.

    Attributes
    ----------
    objects : `sqlalchemy.Table`
        DiaObject table instance
    objects_nightly : `sqlalchemy.Table`
        DiaObjectNightly table instance, may be None
    objects_last : `sqlalchemy.Table`
        DiaObjectLast table instance, may be None
    sources : `sqlalchemy.Table`
        DiaSource table instance
    forcedSources : `sqlalchemy.Table`
        DiaForcedSource table instance
    visits : `sqlalchemy.Table`
        PpdbProtoVisits table instance

    Parameters
    ----------
    engine : `sqlalchemy.engine.Engine`
        SQLAlchemy engine instance
    dia_object_index : `str`
        Indexing mode for DiaObject table, see `PpdbConfig.dia_object_index`
        for details.
    dia_object_nightly : `bool`
        If `True` then create per-night DiaObject table as well.
    schema_file : `str`
        Name of the YAML schema file.
    extra_schema_file : `str`, optional
        Name of the YAML schema file with extra column definitions.
    column_map : `str`, optional
        Name of the YAML file with column mappings.
    afw_schemas : `dict`, optional
        Dictionary with table name for a key and `afw.table.Schema`
        for a value. Columns in schema will be added to standard PPDB
        schema (only if standard schema does not have matching column).
    prefix : `str`, optional
        Prefix to add to all schema elements.
    """

    # map afw type names into cat type names
    _afw_type_map = {"I": "INT",
                     "L": "BIGINT",
                     "F": "FLOAT",
                     "D": "DOUBLE",
                     "Angle": "DOUBLE",
                     "String": "CHAR"}
    _afw_type_map_reverse = {"INT": "I",
                             "BIGINT": "L",
                             "FLOAT": "F",
                             "DOUBLE": "D",
                             "DATETIME": "L",
                             "CHAR": "String"}

    def __init__(self, engine, dia_object_index, dia_object_nightly,
                 schema_file, extra_schema_file=None, column_map=None,
                 afw_schemas=None, prefix=""):

        self._engine = engine
        self._dia_object_index = dia_object_index
        self._dia_object_nightly = dia_object_nightly
        self._prefix = prefix

        self._metadata = MetaData(self._engine)

        self.objects = None
        self.objects_nightly = None
        self.objects_last = None
        self.sources = None
        self.forcedSources = None
        self.visits = None

        if column_map:
            _LOG.debug("Reading column map file %s", column_map)
            with open(column_map) as yaml_stream:
                # maps cat column name to afw column name
                self._column_map = yaml.load(yaml_stream)
            _LOG.debug("column map: %s", self._column_map)
        else:
            _LOG.debug("No column map file is given, initialize to empty")
            self._column_map = {}
        self._column_map_reverse = {}
        for table, cmap in self._column_map.items():
            # maps afw column name to cat column name
            self._column_map_reverse[table] = {v: k for k, v in cmap.items()}
        _LOG.debug("reverse column map: %s", self._column_map_reverse)

        # build complete table schema
        self._schemas = self._buildSchemas(schema_file, extra_schema_file,
                                           afw_schemas)

        # map cat column types to alchemy
        self._type_map = dict(DOUBLE=self._getDoubleType(),
                              FLOAT=sqlalchemy.types.Float,
                              DATETIME=sqlalchemy.types.TIMESTAMP,
                              BIGINT=sqlalchemy.types.BigInteger,
                              INTEGER=sqlalchemy.types.Integer,
                              INT=sqlalchemy.types.Integer,
                              TINYINT=sqlalchemy.types.Integer,
                              BLOB=sqlalchemy.types.LargeBinary,
                              CHAR=sqlalchemy.types.CHAR)

        # generate schema for all tables, must be called last
        self._makeTables()
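
    # Illustrative sketch, not part of the original class: typical
    # construction against an in-memory SQLite engine.  The index mode and
    # schema file name below are hypothetical.
    #
    #     engine = sqlalchemy.create_engine("sqlite://")
    #     schema = PpdbSchema(engine, dia_object_index="baseline",
    #                         dia_object_nightly=False,
    #                         schema_file="ppdb-schema.yaml")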
266 
267  def _makeTables(self, mysql_engine='InnoDB', oracle_tablespace=None, oracle_iot=False):
268  """Generate schema for all tables.
269 
270  Parameters
271  ----------
272  mysql_engine : `str`, optional
273  MySQL engine type to use for new tables.
274  oracle_tablespace : `str`, optional
275  Name of Oracle tablespace, only useful with oracle
276  oracle_iot : `bool`, optional
277  Make Index-organized DiaObjectLast table.
278  """
279 
280  info = dict(oracle_tablespace=oracle_tablespace)
281 
282  if self._dia_object_index == 'pix_id_iov':
283  # Special PK with HTM column in first position
284  constraints = self._tableIndices('DiaObjectIndexHtmFirst', info)
285  else:
286  constraints = self._tableIndices('DiaObject', info)
287  table = Table(self._prefix+'DiaObject', self._metadata,
288  *(self._tableColumns('DiaObject') + constraints),
289  mysql_engine=mysql_engine,
290  info=info)
291  self.objects = table
292 
293  if self._dia_object_nightly:
294  # Same as DiaObject but no index
295  table = Table(self._prefix+'DiaObjectNightly', self._metadata,
296  *self._tableColumns('DiaObject'),
297  mysql_engine=mysql_engine,
298  info=info)
299  self.objects_nightly = table
300 
301  if self._dia_object_index == 'last_object_table':
302  # Same as DiaObject but with special index
303  info2 = info.copy()
304  info2.update(oracle_iot=oracle_iot)
305  table = Table(self._prefix+'DiaObjectLast', self._metadata,
306  *(self._tableColumns('DiaObjectLast') +
307  self._tableIndices('DiaObjectLast', info)),
308  mysql_engine=mysql_engine,
309  info=info2)
310  self.objects_last = table
311 
312  # for all other tables use index definitions in schema
313  for table_name in ('DiaSource', 'SSObject', 'DiaForcedSource', 'DiaObject_To_Object_Match'):
314  table = Table(self._prefix+table_name, self._metadata,
315  *(self._tableColumns(table_name) +
316  self._tableIndices(table_name, info)),
317  mysql_engine=mysql_engine,
318  info=info)
319  if table_name == 'DiaSource':
320  self.sources = table
321  elif table_name == 'DiaForcedSource':
322  self.forcedSources = table
323 
324  # special table to track visits, only used by prototype
325  table = Table(self._prefix+'PpdbProtoVisits', self._metadata,
326  Column('visitId', sqlalchemy.types.BigInteger, nullable=False),
327  Column('visitTime', sqlalchemy.types.TIMESTAMP, nullable=False),
328  PrimaryKeyConstraint('visitId', name=self._prefix+'PK_PpdbProtoVisits'),
329  Index(self._prefix+'IDX_PpdbProtoVisits_vTime', 'visitTime', info=info),
330  mysql_engine=mysql_engine,
331  info=info)
332  self.visits = table
333 
    def makeSchema(self, drop=False, mysql_engine='InnoDB', oracle_tablespace=None, oracle_iot=False):
        """Create or re-create all tables.

        Parameters
        ----------
        drop : `bool`, optional
            If `True` then drop tables before creating new ones.
        mysql_engine : `str`, optional
            MySQL engine type to use for new tables.
        oracle_tablespace : `str`, optional
            Name of Oracle tablespace, only useful with Oracle.
        oracle_iot : `bool`, optional
            Make index-organized DiaObjectLast table.
        """

        # re-make table schema for all needed tables with possibly different options
        _LOG.debug("clear metadata")
        self._metadata.clear()
        _LOG.debug("re-do schema mysql_engine=%r oracle_tablespace=%r",
                   mysql_engine, oracle_tablespace)
        self._makeTables(mysql_engine=mysql_engine, oracle_tablespace=oracle_tablespace,
                         oracle_iot=oracle_iot)

        # create all tables (optionally drop first)
        if drop:
            _LOG.info('dropping all tables')
            self._metadata.drop_all()
        _LOG.info('creating all tables')
        self._metadata.create_all()

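    # Illustrative sketch, not part of the original class: drop and
    # re-create all tables, continuing the construction example above.
    #
    #     schema.makeSchema(drop=True)
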
    def getAfwSchema(self, table_name, columns=None):
        """Return afw schema for given table.

        Parameters
        ----------
        table_name : `str`
            One of known PPDB table names.
        columns : `list` of `str`, optional
            Include only given table columns in schema, by default all columns
            are included.

        Returns
        -------
        schema : `lsst.afw.table.Schema`
        column_map : `dict`
            Mapping of the table/result column names into schema key.
        """

        table = self._schemas[table_name]
        col_map = self._column_map.get(table_name, {})

        # make a schema
        col2afw = {}
        schema = afwTable.SourceTable.makeMinimalSchema()
        for column in table.columns:
            if columns and column.name not in columns:
                continue
            afw_col = col_map.get(column.name, column.name)
            if afw_col in schema.getNames():
                # Column is already in the minimal schema, just look up its key.
                key = schema.find(afw_col).getKey()
            elif column.type in ("DOUBLE", "FLOAT") and column.unit == "deg":
                #
                # NOTE: degree to radian conversion is not supported (yet)
                #
                # angles in afw are radians and have special "Angle" type
                key = schema.addField(afw_col,
                                      type="Angle",
                                      doc=column.description or "",
                                      units="rad")
            elif column.type == "BLOB":
                # No BLOB support for now
                key = None
            else:
                units = column.unit or ""
                # some units in schema are not recognized by afw but we do not care
                if self._afw_type_map_reverse[column.type] == 'String':
                    key = schema.addField(afw_col,
                                          type=self._afw_type_map_reverse[column.type],
                                          doc=column.description or "",
                                          units=units,
                                          parse_strict="silent",
                                          size=10)
                elif units == "deg":
                    key = schema.addField(afw_col,
                                          type='Angle',
                                          doc=column.description or "",
                                          parse_strict="silent")
                else:
                    key = schema.addField(afw_col,
                                          type=self._afw_type_map_reverse[column.type],
                                          doc=column.description or "",
                                          units=units,
                                          parse_strict="silent")
            col2afw[column.name] = key

        return schema, col2afw

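    # Illustrative sketch, not part of the original class: build an afw
    # schema for a subset of DiaSource columns and a catalog that uses it.
    # The column names below are hypothetical.
    #
    #     afw_schema, col2afw = schema.getAfwSchema(
    #         "DiaSource", columns=["diaObjectId", "psFlux", "psFluxErr"])
    #     catalog = afwTable.SourceCatalog(afw_schema)
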
    def getAfwColumns(self, table_name):
        """Returns mapping of afw column names to Column definitions.

        Parameters
        ----------
        table_name : `str`
            One of known PPDB table names.

        Returns
        -------
        column_map : `dict`
            Mapping of afw column names to `ColumnDef` instances.
        """
        table = self._schemas[table_name]
        col_map = self._column_map.get(table_name, {})

        cmap = {}
        for column in table.columns:
            afw_name = col_map.get(column.name, column.name)
            cmap[afw_name] = column
        return cmap

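    # Illustrative sketch: look up the definition of a DiaSource column by
    # its afw name (the column name below is hypothetical).
    #
    #     psflux_def = schema.getAfwColumns("DiaSource")["psFlux"]
    #     print(psflux_def.type, psflux_def.unit)
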
    def getColumnMap(self, table_name):
        """Returns mapping of column names to Column definitions.

        Parameters
        ----------
        table_name : `str`
            One of known PPDB table names.

        Returns
        -------
        column_map : `dict`
            Mapping of column names to `ColumnDef` instances.
        """
        table = self._schemas[table_name]
        cmap = {column.name: column for column in table.columns}
        return cmap

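    # Illustrative sketch: getColumnMap() is keyed by cat (database) column
    # names, while getAfwColumns() above is keyed by the mapped afw names.
    #
    #     for name, col in schema.getColumnMap("DiaObject").items():
    #         print(name, col.type, col.nullable)
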
    def _buildSchemas(self, schema_file, extra_schema_file=None, afw_schemas=None):
        """Create schema definitions for all tables.

        Reads YAML schemas and builds dictionary containing `TableDef`
        instances for each table.

        Parameters
        ----------
        schema_file : `str`
            Name of YAML file with standard cat schema.
        extra_schema_file : `str`, optional
            Name of YAML file with extra table information or `None`.
        afw_schemas : `dict`, optional
            Dictionary with table name for a key and `afw.table.Schema`
            for a value. Columns in schema will be added to standard PPDB
            schema (only if standard schema does not have matching column).

        Returns
        -------
        schemas : `dict`
            Mapping of table names to `TableDef` instances.
        """

        _LOG.debug("Reading schema file %s", schema_file)
        with open(schema_file) as yaml_stream:
            tables = list(yaml.load_all(yaml_stream))
            # index it by table name
        _LOG.debug("Read %d tables from schema", len(tables))

        if extra_schema_file:
            _LOG.debug("Reading extra schema file %s", extra_schema_file)
            with open(extra_schema_file) as yaml_stream:
                extras = list(yaml.load_all(yaml_stream))
                # index it by table name
                schemas_extra = {table['table']: table for table in extras}
        else:
            schemas_extra = {}

        # merge extra schema into a regular schema, for now only columns are merged
        for table in tables:
            table_name = table['table']
            if table_name in schemas_extra:
                columns = table['columns']
                extra_columns = schemas_extra[table_name].get('columns', [])
                extra_columns = {col['name']: col for col in extra_columns}
                _LOG.debug("Extra columns for table %s: %s", table_name, extra_columns.keys())
                columns = []
                for col in table['columns']:
                    if col['name'] in extra_columns:
                        columns.append(extra_columns.pop(col['name']))
                    else:
                        columns.append(col)
                # add all remaining extra columns
                table['columns'] = columns + list(extra_columns.values())

                if 'indices' in schemas_extra[table_name]:
                    raise RuntimeError("Extra table definition contains indices, "
                                       "merging is not implemented")

                del schemas_extra[table_name]

        # Pure "extra" table definitions may contain indices
        tables += schemas_extra.values()

        # convert all dicts into named tuples
        schemas = {}
        for table in tables:

            columns = table.get('columns', [])

            table_name = table['table']
            afw_schema = afw_schemas and afw_schemas.get(table_name)
            if afw_schema:
                # use afw schema to create extra columns
                column_names = {col['name'] for col in columns}
                column_names_lower = {col.lower() for col in column_names}
                for _, field in afw_schema:
                    column = self._field2dict(field, table_name)
                    if column['name'] not in column_names:
                        # check that there is no column name that only differs in case
                        if column['name'].lower() in column_names_lower:
                            raise ValueError("afw.table column name case does not match schema column name")
                        columns.append(column)

            table_columns = []
            for col in columns:
                # For prototype set default to 0 even if columns don't specify it
                if "default" not in col:
                    default = None
                    if col['type'] not in ("BLOB", "DATETIME"):
                        default = 0
                else:
                    default = col["default"]

                column = ColumnDef(name=col['name'],
                                   type=col['type'],
                                   nullable=col.get("nullable"),
                                   default=default,
                                   description=col.get("description"),
                                   unit=col.get("unit"),
                                   ucd=col.get("ucd"))
                table_columns.append(column)

            table_indices = []
            for idx in table.get('indices', []):
                index = IndexDef(name=idx.get('name'),
                                 type=idx.get('type'),
                                 columns=idx.get('columns'))
                table_indices.append(index)

            schemas[table_name] = TableDef(name=table_name,
                                           description=table.get('description'),
                                           columns=table_columns,
                                           indices=table_indices)

        return schemas

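    # Illustrative sketch of the YAML layout _buildSchemas() consumes, as
    # implied by the parsing code above; the schema files shipped with
    # dax_ppdb are the authoritative reference.  Names here are hypothetical.
    #
    #     table: DiaObject
    #     description: The DIA objects table.
    #     columns:
    #     - name: diaObjectId
    #       type: BIGINT
    #       nullable: false
    #       description: Unique id.
    #       ucd: meta.id
    #     indices:
    #     - name: PK_DiaObject
    #       type: PRIMARY
    #       columns: [diaObjectId]
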
    def _tableColumns(self, table_name):
        """Return set of columns in a table.

        Parameters
        ----------
        table_name : `str`
            Name of the table.

        Returns
        -------
        column_defs : `list`
            List of `Column` objects.
        """

        # get the list of columns in primary key, they are treated somewhat
        # specially below
        table_schema = self._schemas[table_name]
        pkey_columns = set()
        for index in table_schema.indices:
            if index.type == 'PRIMARY':
                pkey_columns = set(index.columns)
                break

        # convert all column dicts into alchemy Columns
        column_defs = []
        for column in table_schema.columns:
            kwargs = dict(nullable=column.nullable)
            if column.default is not None:
                kwargs.update(server_default=str(column.default))
            if column.name in pkey_columns:
                kwargs.update(autoincrement=False)
            ctype = self._type_map[column.type]
            column_defs.append(Column(column.name, ctype, **kwargs))

        return column_defs

    def _field2dict(self, field, table_name):
        """Convert afw schema field definition into a dict format.

        Parameters
        ----------
        field : `lsst.afw.table.Field`
            Field in afw table schema.
        table_name : `str`
            Name of the table.

        Returns
        -------
        field_dict : `dict`
            Field attributes for SQL schema:

            - ``name`` : field name (`str`)
            - ``type`` : type name in SQL, e.g. "INT", "FLOAT" (`str`)
            - ``nullable`` : `True` if column can be ``NULL`` (`bool`)
        """
        column = field.getName()
        column = self._column_map_reverse[table_name].get(column, column)
        ctype = self._afw_type_map[field.getTypeString()]
        return dict(name=column, type=ctype, nullable=True)

    def _tableIndices(self, table_name, info):
        """Return set of constraints/indices in a table.

        Parameters
        ----------
        table_name : `str`
            Name of the table.
        info : `dict`
            Additional options passed to SQLAlchemy index constructor.

        Returns
        -------
        index_defs : `list`
            List of SQLAlchemy index/constraint objects.
        """

        table_schema = self._schemas[table_name]

        # convert all index dicts into alchemy Columns
        index_defs = []
        for index in table_schema.indices:
            if index.type == "INDEX":
                index_defs.append(Index(self._prefix+index.name, *index.columns, info=info))
            else:
                kwargs = {}
                if index.name:
                    kwargs['name'] = self._prefix+index.name
                if index.type == "PRIMARY":
                    index_defs.append(PrimaryKeyConstraint(*index.columns, **kwargs))
                elif index.type == "UNIQUE":
                    index_defs.append(UniqueConstraint(*index.columns, **kwargs))

        return index_defs

    def _getDoubleType(self):
        """DOUBLE type is database-specific, select one based on dialect.

        Returns
        -------
        type_object : `object`
            Database-specific type definition.
        """
        if self._engine.name == 'mysql':
            from sqlalchemy.dialects.mysql import DOUBLE
            return DOUBLE(asdecimal=False)
        elif self._engine.name == 'postgresql':
            from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION
            return DOUBLE_PRECISION
        elif self._engine.name == 'oracle':
            from sqlalchemy.dialects.oracle import DOUBLE_PRECISION
            return DOUBLE_PRECISION
        elif self._engine.name == 'sqlite':
            # all floats in sqlite are 8-byte
            from sqlalchemy.dialects.sqlite import REAL
            return REAL
        else:
            raise TypeError('cannot determine DOUBLE type, unexpected dialect: ' + self._engine.name)