Skip to content

Commit 8f8f4b8

Browse files
authored
Merge pull request #23 from tskisner/format
Enforce coding format with uncrustify and black.
2 parents ecd4826 + eb8a4a8 commit 8f8f4b8

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

66 files changed

+8465
-8231
lines changed

.atom/config.cson

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
"*":
2+
"atom-beautify":
3+
c:
4+
beautify_on_save: true
5+
configPath: "src/uncrustify.cfg"
6+
cpp:
7+
beautify_on_save: true
8+
configPath: "src/uncrustify.cfg"
9+
python: {}
10+
editor:
11+
autoIndentOnPaste: false
12+
fontSize: 18
13+
preferredLineLength: 88
14+
scrollPastEnd: true
15+
showInvisibles: true
16+
softWrap: true
17+
softWrapAtPreferredLineLength: true
18+
tabLength: 4
19+
tabType: "soft"
20+
"linter-flake8":
21+
maxLineLength: 88
22+
"python-black":
23+
fmtOnSave: true
24+
lineLength: 88

examples/demo_telescope.py

Lines changed: 91 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
#!/usr/bin/env python3
2-
##
3-
## TImestream DAta Storage (TIDAS)
4-
## Copyright (c) 2014-2018, all rights reserved. Use of this source code
5-
## is governed by a BSD-style license that can be found in the top-level
6-
## LICENSE file.
7-
##
2+
#
3+
# TImestream DAta Storage (TIDAS).
4+
#
5+
# Copyright (c) 2015-2019 by the parties listed in the AUTHORS file. All rights
6+
# reserved. Use of this source code is governed by a BSD-style license that can be
7+
# found in the LICENSE file.
88

99
# WARNING: Running this script will generate a several GB of data...
1010

@@ -37,12 +37,12 @@
3737
# ---- Data from a weather station ----
3838

3939
wfields = list()
40-
wfields.append( tidas.Field("windspeed", tdt.float32, "Meters / second") )
41-
wfields.append( tidas.Field("windangle", tdt.float32, "Degrees") )
42-
wfields.append( tidas.Field("temperature", tdt.float32, "Degrees Celsius") )
43-
wfields.append( tidas.Field("pressure", tdt.float32, "Millibars") )
44-
wfields.append( tidas.Field("humidity", tdt.float32, "Percent") )
45-
wfields.append( tidas.Field("PWV", tdt.float32, "mm") )
40+
wfields.append(tidas.Field("windspeed", tdt.float32, "Meters / second"))
41+
wfields.append(tidas.Field("windangle", tdt.float32, "Degrees"))
42+
wfields.append(tidas.Field("temperature", tdt.float32, "Degrees Celsius"))
43+
wfields.append(tidas.Field("pressure", tdt.float32, "Millibars"))
44+
wfields.append(tidas.Field("humidity", tdt.float32, "Percent"))
45+
wfields.append(tidas.Field("PWV", tdt.float32, "mm"))
4646
weather_schema = tidas.Schema(wfields)
4747

4848
# sampled every 10 seconds
@@ -52,8 +52,8 @@
5252
# ---- Housekeeping data ----
5353

5454
hfields = list()
55-
hfields.append( tidas.Field("thermo1", tdt.float32, "Degrees Kelvin") )
56-
hfields.append( tidas.Field("thermo2", tdt.float32, "Degrees Kelvin") )
55+
hfields.append(tidas.Field("thermo1", tdt.float32, "Degrees Kelvin"))
56+
hfields.append(tidas.Field("thermo2", tdt.float32, "Degrees Kelvin"))
5757
hk_schema = tidas.Schema(hfields)
5858

5959
# sampled once per minute
@@ -63,9 +63,9 @@
6363
# ---- Pointing data ----
6464

6565
pfields = list()
66-
pfields.append( tidas.Field("az", tdt.float32, "Radians") )
67-
pfields.append( tidas.Field("el", tdt.float32, "Radians") )
68-
pfields.append( tidas.Field("psi", tdt.float32, "Radians") )
66+
pfields.append(tidas.Field("az", tdt.float32, "Radians"))
67+
pfields.append(tidas.Field("el", tdt.float32, "Radians"))
68+
pfields.append(tidas.Field("psi", tdt.float32, "Radians"))
6969
pointing_schema = tidas.Schema(pfields)
7070

7171
# sampled at 20 Hz
@@ -80,7 +80,7 @@
8080

8181
for d in range(ndet):
8282
detname = "det_{:04d}".format(d)
83-
dfields.append( tidas.Field(detname, tdt.int16, "ADU") )
83+
dfields.append(tidas.Field(detname, tdt.int16, "ADU"))
8484

8585
det_schema = tidas.Schema(dfields)
8686

@@ -98,16 +98,15 @@
9898
# Create the volume all at once. To keep the size of the volume
9999
# reasonable for this demo, only write 3 days of data.
100100

101-
vol = tidas.Volume(path, tidas.BackendType.hdf5,
102-
tidas.CompressionType.none, dict())
101+
vol = tidas.Volume(path, tidas.BackendType.hdf5, tidas.CompressionType.none, dict())
103102

104103
# Get the root block of the volume
105104
root = vol.root()
106105

107106
volstart = datetime.datetime(2018, 1, 1)
108107
volstartsec = volstart.timestamp()
109108

110-
for year in ["2018",]:
109+
for year in ["2018"]:
111110
# Add a block for this year
112111
yb = root.block_add(year, tidas.Block())
113112

@@ -120,8 +119,7 @@
120119
for dy in range(1, 4):
121120

122121
daystart = datetime.datetime(int(year), monthnum, dy)
123-
daystartsec = (daystart - volstart).total_seconds() \
124-
+ volstartsec
122+
daystartsec = (daystart - volstart).total_seconds() + volstartsec
125123

126124
# Add a block for the day
127125
day = "{:02d}".format(dy)
@@ -137,83 +135,112 @@
137135

138136
print(" writing weather data")
139137

140-
weather = tidas.Group(weather_schema, tidas.Dictionary(),
141-
weather_daysamples)
138+
weather = tidas.Group(
139+
weather_schema, tidas.Dictionary(), weather_daysamples
140+
)
142141
weather = db.group_add("weather", weather)
143-
weather.write_times(0, np.linspace(daystartsec,
144-
daystartsec + day_seconds, num=weather_daysamples))
145-
146-
data = np.absolute(np.random.normal(loc=0.0, scale=5.0,
147-
size=weather_daysamples)).astype(np.float32)
142+
weather.write_times(
143+
0,
144+
np.linspace(
145+
daystartsec, daystartsec + day_seconds, num=weather_daysamples
146+
),
147+
)
148+
149+
data = np.absolute(
150+
np.random.normal(loc=0.0, scale=5.0, size=weather_daysamples)
151+
).astype(np.float32)
148152
weather.write("windspeed", 0, data)
149153

150-
data = 360.0 * np.absolute(np.random.random(
151-
size=weather_daysamples)).astype(np.float32)
154+
data = 360.0 * np.absolute(
155+
np.random.random(size=weather_daysamples)
156+
).astype(np.float32)
152157
weather.write("windangle", 0, data)
153158

154-
data = np.absolute(np.random.normal(loc=25.0, scale=5.0,
155-
size=weather_daysamples)).astype(np.float32)
159+
data = np.absolute(
160+
np.random.normal(loc=25.0, scale=5.0, size=weather_daysamples)
161+
).astype(np.float32)
156162
weather.write("temperature", 0, data)
157163

158-
data = np.absolute(np.random.normal(loc=1013.25, scale=30.0,
159-
size=weather_daysamples)).astype(np.float32)
164+
data = np.absolute(
165+
np.random.normal(loc=1013.25, scale=30.0, size=weather_daysamples)
166+
).astype(np.float32)
160167
weather.write("pressure", 0, data)
161168

162-
data = np.absolute(np.random.normal(loc=30.0, scale=10.0,
163-
size=weather_daysamples)).astype(np.float32)
169+
data = np.absolute(
170+
np.random.normal(loc=30.0, scale=10.0, size=weather_daysamples)
171+
).astype(np.float32)
164172
weather.write("humidity", 0, data)
165173

166-
data = np.absolute(np.random.normal(loc=10.0, scale=5.0,
167-
size=weather_daysamples)).astype(np.float32)
174+
data = np.absolute(
175+
np.random.normal(loc=10.0, scale=5.0, size=weather_daysamples)
176+
).astype(np.float32)
168177
weather.write("PWV", 0, data)
169178

170179
print(" writing housekeeping data")
171180

172181
hk = tidas.Group(hk_schema, tidas.Dictionary(), hk_daysamples)
173182
hk = db.group_add("hk", hk)
174-
hk.write_times(0, np.linspace(daystartsec,
175-
daystartsec + day_seconds, num=hk_daysamples))
176-
177-
data = np.random.normal(loc=273.0, scale=5.0,
178-
size=hk_daysamples).astype(np.float32)
183+
hk.write_times(
184+
0,
185+
np.linspace(daystartsec, daystartsec + day_seconds, num=hk_daysamples),
186+
)
187+
188+
data = np.random.normal(loc=273.0, scale=5.0, size=hk_daysamples).astype(
189+
np.float32
190+
)
179191
hk.write("thermo1", 0, data)
180192

181-
data = np.random.normal(loc=77.0, scale=5.0,
182-
size=hk_daysamples).astype(np.float32)
193+
data = np.random.normal(loc=77.0, scale=5.0, size=hk_daysamples).astype(
194+
np.float32
195+
)
183196
hk.write("thermo2", 0, data)
184197

185198
print(" writing pointing data")
186199

187-
pointing = tidas.Group(pointing_schema, tidas.Dictionary(),
188-
pointing_daysamples)
200+
pointing = tidas.Group(
201+
pointing_schema, tidas.Dictionary(), pointing_daysamples
202+
)
189203
pointing = db.group_add("pointing", pointing)
190-
pointing.write_times(0, np.linspace(daystartsec,
191-
daystartsec + day_seconds, num=pointing_daysamples))
192-
193-
data = 2.0 * np.pi * np.random.random(
194-
size=pointing_daysamples).astype(np.float32)
204+
pointing.write_times(
205+
0,
206+
np.linspace(
207+
daystartsec, daystartsec + day_seconds, num=pointing_daysamples
208+
),
209+
)
210+
211+
data = (
212+
2.0
213+
* np.pi
214+
* np.random.random(size=pointing_daysamples).astype(np.float32)
215+
)
195216
pointing.write("az", 0, data)
196217

197-
data = 0.4 * np.pi * np.random.random(
198-
size=pointing_daysamples).astype(np.float32)
218+
data = (
219+
0.4
220+
* np.pi
221+
* np.random.random(size=pointing_daysamples).astype(np.float32)
222+
)
199223
pointing.write("el", 0, data)
200224

201-
data = np.random.normal(loc=0.0, scale=(0.01*np.pi),
202-
size=pointing_daysamples).astype(np.float32)
225+
data = np.random.normal(
226+
loc=0.0, scale=(0.01 * np.pi), size=pointing_daysamples
227+
).astype(np.float32)
203228
pointing.write("psi", 0, data)
204229

205230
print(" writing detector data")
206231

207-
det = tidas.Group(det_schema, tidas.Dictionary(),
208-
det_daysamples)
232+
det = tidas.Group(det_schema, tidas.Dictionary(), det_daysamples)
209233
det = db.group_add("detectors", det)
210-
det.write_times(0, np.linspace(daystartsec,
211-
daystartsec + day_seconds, num=det_daysamples))
234+
det.write_times(
235+
0,
236+
np.linspace(daystartsec, daystartsec + day_seconds, num=det_daysamples),
237+
)
212238

213239
for d in range(ndet):
214240
detname = "det_{:04d}".format(d)
215-
data = np.random.normal(loc=32768, scale=2000,
216-
size=det_daysamples).astype(np.int16)
241+
data = np.random.normal(
242+
loc=32768, scale=2000, size=det_daysamples
243+
).astype(np.int16)
217244
det.write(detname, 0, data)
218245

219246
# Take a quick peek at organization:

0 commit comments

Comments
 (0)