Skip to content

Commit 7cae37d

Browse files
authored
Merge pull request #276 from sot/exactly-one-data-source
Exactly one data source
2 parents 45cc572 + 937c4ad commit 7cae37d

File tree

5 files changed

+125
-118
lines changed

5 files changed

+125
-118
lines changed

cheta/converters.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ def _convert(dat):
158158
out_array = np.zeros(shape=len(dat), dtype=default_dtypes[out_name])
159159
quality = True
160160

161-
assert out_array.ndim == 1
161+
assert out_array.ndim == 1 # noqa: S101
162162
out_arrays[out_name] = out_array
163163
out_quality[:, out_names.index(out_name)] = quality
164164

cheta/data_source.py

Lines changed: 113 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,113 @@
"""Provide a singleton class to define data source (CXC or MAUDE or combination)"""

import ast

# Default source of data.
DEFAULT_DATA_SOURCE = "cxc"


class data_source:
    """
    Context manager and quasi-singleton configuration object for managing the
    data_source(s) used for fetching telemetry.

    Each data source is a string spec: a source name optionally followed by
    space-separated ``key=value`` options, e.g. ``"cxc"`` or
    ``"maude allow_subset=False"``.
    """

    # Currently-selected data source specs (class-level, hence quasi-singleton).
    _data_sources = (DEFAULT_DATA_SOURCE,)
    # Allowed source names (the first whitespace-delimited token of a spec).
    _allowed = ("cxc", "maude", "test-drop-half")

    def __init__(self, *data_sources):
        # Source specs to apply while inside the ``with`` block.
        self._new_data_sources = data_sources

    def __enter__(self):
        # Remember current sources so __exit__ can restore them.
        self._orig_data_sources = self.__class__._data_sources
        self.set(*self._new_data_sources)

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore the sources that were in effect before __enter__.
        self.__class__._data_sources = self._orig_data_sources

    @classmethod
    def set(cls, *data_sources):
        """
        Set current data sources.

        :param data_sources: one or more sources (str)
        :raises ValueError: if no source is supplied or a source name is not
            in the allowed set
        """
        if any(
            data_source.split()[0] not in cls._allowed for data_source in data_sources
        ):
            raise ValueError(
                "data_sources {} not in allowed set {}".format(
                    data_sources, cls._allowed
                )
            )

        if len(data_sources) == 0:
            raise ValueError(
                "must select at least one data source in {}".format(cls._allowed)
            )

        cls._data_sources = data_sources

    @classmethod
    def sources(cls, include_test=True):
        """
        Get tuple of current data sources names.

        :param include_test: include sources that start with 'test'
        :returns: tuple of data source names (options stripped)
        """
        if include_test:
            sources = cls._data_sources
        else:
            sources = [x for x in cls._data_sources if not x.startswith("test")]

        # Return only the source name (first token), dropping any options.
        return tuple(source.split()[0] for source in sources)

    @classmethod
    def get_msids(cls, source):
        """
        Get the set of MSID names corresponding to ``source`` (e.g. 'cxc' or 'maude')

        :param source: str
        :returns: set of MSIDs
        :raises ValueError: if ``source`` is not "cxc" or "maude"
        """
        source = source.split()[0]

        if source == "cxc":
            # Deferred import to avoid a circular import at module load time.
            import cheta.fetch  # noqa: PLC0415

            out = list(cheta.fetch.content.keys())
        elif source == "maude":
            import maude  # noqa: PLC0415

            out = list(maude.MSIDS.keys())
        else:
            # Bug fix: message previously said "msid" instead of "maude".
            raise ValueError('source must be "cxc" or "maude"')

        return set(out)

    @classmethod
    def options(cls):
        """
        Get the data sources and corresponding options as a dict.

        Example::

          >>> data_source.set('cxc', 'maude allow_subset=False')
          >>> data_source.options()
          {'cxc': {}, 'maude': {'allow_subset': False}}

        :returns: dict of data source options
        """
        out = {}
        for source in cls._data_sources:
            vals = source.split()
            name, opts = vals[0], vals[1:]
            out[name] = {}
            for opt in opts:
                key, val = opt.split("=")
                # Options are parsed as Python literals (e.g. False, 2, 'x').
                val = ast.literal_eval(val)
                out[name][key] = val

        return out

cheta/fetch.py

Lines changed: 2 additions & 113 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@
2323
from Chandra.Time import DateTime
2424
from ska_helpers.utils import lru_cache_timed
2525

26+
from cheta.data_source import DEFAULT_DATA_SOURCE, data_source
27+
2628
from . import (
2729
__version__, # noqa
2830
cache,
@@ -138,119 +140,6 @@
138140
# Cached version (by content type) of first and last available times in archive
139141
CONTENT_TIME_RANGES = {}
140142

141-
# Default source of data.
142-
DEFAULT_DATA_SOURCE = "cxc"
143-
144-
145-
class _DataSource(object):
146-
"""
147-
Context manager and quasi-singleton configuration object for managing the
148-
data_source(s) used for fetching telemetry.
149-
"""
150-
151-
_data_sources = (DEFAULT_DATA_SOURCE,)
152-
_allowed = ("cxc", "maude", "test-drop-half")
153-
154-
def __init__(self, *data_sources):
155-
self._new_data_sources = data_sources
156-
157-
def __enter__(self):
158-
self._orig_data_sources = self.__class__._data_sources
159-
self.set(*self._new_data_sources)
160-
161-
def __exit__(self, type, value, traceback):
162-
self.__class__._data_sources = self._orig_data_sources
163-
164-
@classmethod
165-
def set(cls, *data_sources):
166-
"""
167-
Set current data sources.
168-
169-
:param data_sources: one or more sources (str)
170-
"""
171-
if any(
172-
data_source.split()[0] not in cls._allowed for data_source in data_sources
173-
):
174-
raise ValueError(
175-
"data_sources {} not in allowed set {}".format(
176-
data_sources, cls._allowed
177-
)
178-
)
179-
180-
if len(data_sources) == 0:
181-
raise ValueError(
182-
"must select at least one data source in {}".format(cls._allowed)
183-
)
184-
185-
cls._data_sources = data_sources
186-
187-
@classmethod
188-
def sources(cls, include_test=True):
189-
"""
190-
Get tuple of current data sources names.
191-
192-
:param include_test: include sources that start with 'test'
193-
:returns: tuple of data source names
194-
"""
195-
if include_test:
196-
sources = cls._data_sources
197-
else:
198-
sources = [x for x in cls._data_sources if not x.startswith("test")]
199-
200-
return tuple(source.split()[0] for source in sources)
201-
202-
@classmethod
203-
def get_msids(cls, source):
204-
"""
205-
Get the set of MSID names corresponding to ``source`` (e.g. 'cxc' or 'maude')
206-
207-
:param source: str
208-
:returns: set of MSIDs
209-
"""
210-
source = source.split()[0]
211-
212-
if source == "cxc":
213-
out = list(content.keys())
214-
elif source == "maude":
215-
import maude
216-
217-
out = list(maude.MSIDS.keys())
218-
else:
219-
raise ValueError('source must be "cxc" or "msid"')
220-
221-
return set(out)
222-
223-
@classmethod
224-
def options(cls):
225-
"""
226-
Get the data sources and corresponding options as a dict.
227-
228-
Example::
229-
230-
>>> data_source.set('cxc', 'maude allow_subset=False')
231-
>>> data_source.options()
232-
{'cxc': {}, 'maude': {'allow_subset': False}}
233-
234-
:returns: dict of data source options
235-
"""
236-
import ast
237-
238-
out = {}
239-
for source in cls._data_sources:
240-
vals = source.split()
241-
name, opts = vals[0], vals[1:]
242-
out[name] = {}
243-
for opt in opts:
244-
key, val = opt.split("=")
245-
val = ast.literal_eval(val)
246-
out[name][key] = val
247-
248-
return out
249-
250-
251-
# Public interface is a "data_source" module attribute
252-
data_source = _DataSource
253-
254143

255144
def local_or_remote_function(remote_print_output):
256145
"""

cheta/update_client_archive.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -515,7 +515,7 @@ def as_python(val):
515515
db.insert(vals, "archfiles")
516516
except sqlite3.IntegrityError as err:
517517
# Expected exception for archfiles already in the table
518-
assert "UNIQUE constraint failed: archfiles.filename" in str(
518+
assert "UNIQUE constraint failed: archfiles.filename" in str( # noqa: S101
519519
err
520520
)
521521

ruff-base.toml

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
# Copied originally from pandas. This config requires ruff >= 0.2.
2-
target-version = "py310"
1+
target-version = "py312"
32

43
# fix = true
54
lint.unfixable = []
@@ -31,6 +30,7 @@ lint.extend-select = [
3130
"ARG001", # Unused function argument
3231
"RSE102", # Unnecessary parentheses on raised exception
3332
"PERF401", # Use a list comprehension to create a transformed list
33+
"S101", # Use of `assert` detected
3434
]
3535

3636
lint.ignore = [
@@ -40,10 +40,14 @@ lint.ignore = [
4040
"PLR2004", # Magic number
4141
"B028", # No explicit `stacklevel` keyword argument found
4242
"PLR0913", # Too many arguments to function call
43+
"PLR1730", # Checks for if statements that can be replaced with min() or max() calls
44+
"PLC0415", # `import` should be at the top-level of a file
45+
"PLW1641", # Class implements `__hash__` if `__eq__` is implemented
4346
]
4447

4548
extend-exclude = [
4649
"docs",
50+
"build",
4751
]
4852

4953
[lint.pycodestyle]
@@ -55,4 +59,5 @@ max-line-length = 100 # E501 reports lines that exceed the length of 100.
5559
# - D205: Don't worry about test docstrings
5660
# - ARG001: Unused function argument false positives for some fixtures
5761
# - E501: Line-too-long
58-
"**/tests/test_*.py" = ["D205", "ARG001", "E501"]
62+
# - S101: Do not use assert
63+
"**/tests/test_*.py" = ["D205", "ARG001", "E501", "S101"]

0 commit comments

Comments
 (0)