LSST Applications g070148d5b3+33e5256705,g0d53e28543+25c8b88941,g0da5cf3356+2dd1178308,g1081da9e2a+62d12e78cb,g17e5ecfddb+7e422d6136,g1c76d35bf8+ede3a706f7,g295839609d+225697d880,g2e2c1a68ba+cc1f6f037e,g2ffcdf413f+853cd4dcde,g38293774b4+62d12e78cb,g3b44f30a73+d953f1ac34,g48ccf36440+885b902d19,g4b2f1765b6+7dedbde6d2,g5320a0a9f6+0c5d6105b6,g56b687f8c9+ede3a706f7,g5c4744a4d9+ef6ac23297,g5ffd174ac0+0c5d6105b6,g6075d09f38+66af417445,g667d525e37+2ced63db88,g670421136f+2ced63db88,g71f27ac40c+2ced63db88,g774830318a+463cbe8d1f,g7876bc68e5+1d137996f1,g7985c39107+62d12e78cb,g7fdac2220c+0fd8241c05,g96f01af41f+368e6903a7,g9ca82378b8+2ced63db88,g9d27549199+ef6ac23297,gabe93b2c52+e3573e3735,gb065e2a02a+3dfbe639da,gbc3249ced9+0c5d6105b6,gbec6a3398f+0c5d6105b6,gc9534b9d65+35b9f25267,gd01420fc67+0c5d6105b6,geee7ff78d7+a14128c129,gf63283c776+ede3a706f7,gfed783d017+0c5d6105b6,w.2022.47
LSST Data Management Base Package
Loading...
Searching...
No Matches
mapper.py
Go to the documentation of this file.
1#!/usr/bin/env python
2
3#
4# LSST Data Management System
5# Copyright 2008, 2009, 2010 LSST Corporation.
6#
7# This product includes software developed by the
8# LSST Project (http://www.lsst.org/).
9#
10# This program is free software: you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation, either version 3 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the LSST License Statement and
21# the GNU General Public License along with this program. If not,
22# see <http://www.lsstcorp.org/LegalNotices/>.
23#
24from . import Policy
25from .deprecation import deprecate_class
26
27"""This module defines the Mapper base class."""
28
29
@deprecate_class
class Mapper:
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, key, format, dataId)
        Return the possible values for the format fields that would produce
        datasets at the granularity of key in combination with the provided
        partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given data set type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, key, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
    """

    @staticmethod
    def Mapper(cfg):
        '''Instantiate a Mapper from a configuration.

        In some cases the cfg may have already been instantiated into a
        Mapper; this is allowed and the input var is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be created by calling
                    Mapper.cfg()
        :return: a Mapper instance
        '''
        if isinstance(cfg, Policy):
            # The 'cls' entry names the concrete Mapper subclass to build.
            return cfg['cls'](cfg)
        return cfg

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args,**kwargs. The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super().__new__(cls)
        # Captured for __getstate__/__setstate__ pickling support.
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        # Pickle only the constructor arguments captured in __new__.
        return self._arguments

    def __setstate__(self, state):
        # Restore the captured constructor arguments, then re-run __init__
        # with them to rebuild any derived state.
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)

    def keys(self):
        """Return a list of the keys that can be used in data ids.

        Must be overridden by subclasses.
        """
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: the dataset type whose query_{datasetType}
            method is dispatched to
        :param format: the key or keys whose possible values are requested
        :param dataId: a partial data id constraining the query; it is
            passed through validate() before use
        :return: whatever the subclass's query_{datasetType} method returns
        """
        # Dispatch to the subclass-provided query method by name;
        # raises AttributeError if the dataset type has no query_ method.
        func = getattr(self, 'query_' + datasetType)
        return func(format, self.validate(dataId))

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""
        # A dataset type is mappable iff a map_{type} method exists;
        # strip the "map_" prefix to recover the type name.
        prefix = "map_"
        return [attr[len(prefix):] for attr in dir(self) if attr.startswith(prefix)]

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived mapper
            class may raise a lsst.daf.persistence.NoResults exception. Butler
            catches this and will look in the next Repository if there is one.
        """
        # Dispatch to the subclass-provided map method by name.
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type (i.e. a std_{datasetType} method exists)."""
        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its data
        set type, if it exists; otherwise return the item unchanged."""
        # Single lookup with a default instead of hasattr + getattr.
        func = getattr(self, 'std_' + datasetType, None)
        if func is not None:
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception.

        The base-class implementation accepts everything unchanged.
        """
        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry; the base class has none."""
        return None
table::Key< int > a
def map(self, datasetType, dataId, write=False)
Definition: mapper.py:137
def __new__(cls, *args, **kwargs)
Definition: mapper.py:84
def __init__(self, **kwargs)
Definition: mapper.py:100
def standardize(self, datasetType, item, dataId)
Definition: mapper.py:173
def queryMetadata(self, datasetType, format, dataId)
Definition: mapper.py:114
def canStandardize(self, datasetType)
Definition: mapper.py:167
def backup(self, datasetType, dataId)
Definition: mapper.py:191