mapper.py
#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
from . import Policy
from .deprecation import deprecate_class

"""This module defines the Mapper base class."""

@deprecate_class
class Mapper:
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would produce
        datasets consistent with the provided partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given dataset type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
    """
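    # Illustrative sketch (not part of this module): a minimal concrete
    # subclass honoring the contract above might look like (MinimalMapper
    # and the 'raw' dataset type are hypothetical)
    #
    #     class MinimalMapper(Mapper):
    #         def keys(self):
    #             return ["visit", "ccd"]
    #
    #         def map_raw(self, dataId, write=False):
    #             # return a ButlerLocation describing where the 'raw'
    #             # dataset for this dataId lives
    #             ...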

    @staticmethod
    def Mapper(cfg):
        '''Instantiate a Mapper from a configuration.

        In some cases the cfg may already have been instantiated into a
        Mapper; this is allowed, and the input argument is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be
                    created by calling Mapper.cfg()
        :return: a Mapper instance
        '''
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg
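    # Illustrative sketch (not part of this module): given a Policy-based cfg
    # whose 'cls' entry names a concrete Mapper subclass, the factory either
    # constructs or passes through, e.g.
    #
    #     m = Mapper.Mapper(cfg)      # cfg is a Policy: returns cfg['cls'](cfg)
    #     m2 = Mapper.Mapper(m)       # already a Mapper: returned unchanged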

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs). The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super().__new__(cls)
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)

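    # Illustrative sketch (not part of this module): because __new__ records
    # the constructor arguments and __getstate__/__setstate__ replay them,
    # a concrete subclass pickles without extra work. SomeMapper is a
    # hypothetical subclass accepting a root path, e.g.
    #
    #     import pickle
    #     m = SomeMapper(root="/data/repo")
    #     m2 = pickle.loads(pickle.dumps(m))   # __setstate__ re-runs __init__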
    def keys(self):
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: see documentation about the use of datasetType
        :param format: the key or keys whose possible values are to be
                       returned
        :param dataId: see documentation about the use of dataId
        :return: the possible values for the format fields, given the
                 partial dataId
        """
        func = getattr(self, 'query_' + datasetType)

        val = func(format, self.validate(dataId))
        return val

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""

        datasetTypes = []
        for attr in dir(self):
            if attr.startswith("map_"):
                datasetTypes.append(attr[4:])
        return datasetTypes

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False, a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults
            exception. Butler catches this and will look in the next
            Repository if there is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

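    # Illustrative sketch (not part of this module): for a subclass defining
    # map_raw (a hypothetical dataset type), map() dispatches via getattr,
    # e.g.
    #
    #     loc = mapper.map("raw", {"visit": 1, "ccd": 3})  # calls map_raw(validated dataId, write=False)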
    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type."""

        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        dataset type, if one exists."""

        if hasattr(self, 'std_' + datasetType):
            func = getattr(self, 'std_' + datasetType)
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception."""

        return dataId

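    # Illustrative sketch (not part of this module): a subclass override of
    # validate might coerce a fixable component rather than fail, e.g.
    #
    #     def validate(self, dataId):
    #         if "visit" in dataId and not isinstance(dataId["visit"], int):
    #             dataId = dict(dataId)              # copy, then fix the component
    #             dataId["visit"] = int(dataId["visit"])
    #         return dataId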
    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry"""
        return None