|
| 1 | +#!/usr/bin/env python3 |
| 2 | +## |
| 3 | +## TImestream DAta Storage (TIDAS) |
| 4 | +## Copyright (c) 2014-2017, all rights reserved. Use of this source code |
| 5 | +## is governed by a BSD-style license that can be found in the top-level |
| 6 | +## LICENSE file. |
| 7 | +## |
| 8 | + |
| 9 | +# WARNING: Running this script will generate several GB of data... |
| 10 | + |
| 11 | +# This is just a toy exercise. We assume continuous data collection with |
| 12 | +# no gaps. We use very simple types of detector and housekeeping data. |
| 13 | +# In a "real" dataset, we would be unpacking raw frame data from a data |
| 14 | +# acquisition system and would know how many samples we have for a given |
| 15 | +# observation. Here we just make these numbers up. |
| 16 | + |
| 17 | +# THIS IS JUST AN EXAMPLE. |
| 18 | + |
| 19 | +import os |
| 20 | +import sys |
| 21 | +import shutil |
| 22 | + |
| 23 | +import numpy as np |
| 24 | + |
| 25 | +import datetime |
| 26 | +import calendar |
| 27 | + |
| 28 | +import tidas |
| 29 | + |
# Name of the volume on disk.
path = "demo_telescope"

# Schemas for the data groups written for each day follow.

# ---- Data from a weather station ----
# Each field is stored as float32, tagged with its physical unit.
weather_schema = dict([
    ("windspeed", ("float32", "Meters / second")),
    ("windangle", ("float32", "Degrees")),
    ("temperature", ("float32", "Degrees Celsius")),
    ("pressure", ("float32", "Millibars")),
    ("humidity", ("float32", "Percent")),
    ("PWV", ("float32", "mm")),
])

# One sample every 10 seconds.
weather_rate = 1.0 / 10.0
# Number of samples in a 24-hour day at that rate.
weather_daysamples = int(24.0 * 3600.0 * weather_rate)
| 50 | + |
# ---- Housekeeping data ----
# Two thermometer channels, both float32 in Kelvin.
hk_schema = {}
for channel in ("thermo1", "thermo2"):
    hk_schema[channel] = ("float32", "Degrees Kelvin")

# One sample per minute.
hk_rate = 1.0 / 60.0
# Number of samples in a 24-hour day at that rate.
hk_daysamples = int(24.0 * 3600.0 * hk_rate)
| 61 | + |
# ---- Pointing data ----
# Boresight azimuth / elevation / rotation angles, all in radians.
pointing_schema = {
    axis: ("float32", "Radians") for axis in ("az", "el", "psi")
}

# Sampled at 20 Hz.
pointing_rate = 20.0
# Number of samples in a 24-hour day at that rate.
pointing_daysamples = int(24.0 * 3600.0 * pointing_rate)
| 73 | + |
# ---- Detector data ----

ndet = 50

# One int16 field per detector, named det_0000 ... det_0049.
det_schema = {
    "det_{:04d}".format(d): ("int16", "ADU") for d in range(ndet)
}

# Sampled at 100 Hz.
det_rate = 100.0
# Number of samples in a 24-hour day at that rate.
det_daysamples = int(24.0 * 3600.0 * det_rate)

# Seconds in one day, used for timestamp spans below.
day_seconds = 24 * 3600
| 89 | + |
# Wipe any existing volume so the demo always starts from a clean slate.
volume_exists = os.path.isdir(path)
if volume_exists:
    shutil.rmtree(path)
| 94 | + |
# Create the volume all at once. To keep the size of the volume
# reasonable for this demo, only write 3 days of data.

def _day_times(start, nsamp):
    # Timestamps for one day of data. endpoint=False keeps the sample
    # spacing at exactly day_seconds / nsamp (i.e. 1 / rate) and stops
    # the final sample of one day from duplicating the first sample of
    # the next day (the default endpoint=True would include
    # start + day_seconds and give a spacing of day_seconds / (nsamp-1)).
    return np.linspace(start, start + day_seconds, num=nsamp,
        endpoint=False)

with tidas.Volume(path, backend="hdf5") as vol:

    # Get the root block of the volume
    root = vol.root()

    # All timestamps below are seconds relative to the Unix epoch,
    # offset from this volume start.
    # NOTE(review): datetime(2018, 1, 1) is naive, so .timestamp()
    # interprets it in local time — confirm whether UTC was intended.
    volstart = datetime.datetime(2018, 1, 1)
    volstartsec = volstart.timestamp()

    for year in ["2018",]:
        # Add a block for this year
        yb = root.block_add(year)

        for monthnum in range(1, 2):
            # Add a block for the month
            month = calendar.month_abbr[monthnum]
            mb = yb.block_add(month)

            # Only the first three days of the month, to bound the
            # demo's data volume.
            for dy in range(1, 4):

                daystart = datetime.datetime(int(year), monthnum, dy)
                daystartsec = (daystart - volstart).total_seconds() \
                    + volstartsec

                # Add a block for the day
                day = "{:02d}".format(dy)
                db = mb.block_add(day)

                # Deterministic per-day seed, so re-running the script
                # reproduces the same fake data.
                seed = int(year) * 1000000 + monthnum * 10000 + dy * 100
                np.random.seed(seed)

                # Now we are going to add the data groups for this day.

                print("{}-{}-{:02d}:".format(year, month, dy))

                print(" writing weather data")

                weather = tidas.Group(schema=weather_schema,
                    size=weather_daysamples)
                weather = db.group_add("weather", weather)
                weather.write_times(_day_times(daystartsec,
                    weather_daysamples))

                data = np.absolute(np.random.normal(loc=0.0, scale=5.0,
                    size=weather_daysamples)).astype(np.float32)
                weather.write("windspeed", 0, data)

                # np.random.random() is already non-negative, so no
                # absolute value is needed.
                data = 360.0 * np.random.random(
                    size=weather_daysamples).astype(np.float32)
                weather.write("windangle", 0, data)

                data = np.absolute(np.random.normal(loc=25.0, scale=5.0,
                    size=weather_daysamples)).astype(np.float32)
                weather.write("temperature", 0, data)

                data = np.absolute(np.random.normal(loc=1013.25, scale=30.0,
                    size=weather_daysamples)).astype(np.float32)
                weather.write("pressure", 0, data)

                data = np.absolute(np.random.normal(loc=30.0, scale=10.0,
                    size=weather_daysamples)).astype(np.float32)
                weather.write("humidity", 0, data)

                data = np.absolute(np.random.normal(loc=10.0, scale=5.0,
                    size=weather_daysamples)).astype(np.float32)
                weather.write("PWV", 0, data)

                print(" writing housekeeping data")

                hk = tidas.Group(schema=hk_schema, size=hk_daysamples)
                hk = db.group_add("hk", hk)
                hk.write_times(_day_times(daystartsec, hk_daysamples))

                data = np.random.normal(loc=273.0, scale=5.0,
                    size=hk_daysamples).astype(np.float32)
                hk.write("thermo1", 0, data)

                data = np.random.normal(loc=77.0, scale=5.0,
                    size=hk_daysamples).astype(np.float32)
                hk.write("thermo2", 0, data)

                print(" writing pointing data")

                pointing = tidas.Group(schema=pointing_schema,
                    size=pointing_daysamples)
                pointing = db.group_add("pointing", pointing)
                pointing.write_times(_day_times(daystartsec,
                    pointing_daysamples))

                data = 2.0 * np.pi * np.random.random(
                    size=pointing_daysamples).astype(np.float32)
                pointing.write("az", 0, data)

                data = 0.4 * np.pi * np.random.random(
                    size=pointing_daysamples).astype(np.float32)
                pointing.write("el", 0, data)

                data = np.random.normal(loc=0.0, scale=(0.01*np.pi),
                    size=pointing_daysamples).astype(np.float32)
                pointing.write("psi", 0, data)

                print(" writing detector data")

                det = tidas.Group(schema=det_schema, size=det_daysamples)
                det = db.group_add("detectors", det)
                det.write_times(_day_times(daystartsec, det_daysamples))

                for d in range(ndet):
                    detname = "det_{:04d}".format(d)
                    # loc=32768 is above the int16 maximum (32767);
                    # casting without clipping wraps out-of-range values
                    # to negative garbage. Clip into the valid range
                    # first. (If full 0..65535 ADU was intended, the
                    # schema should probably use uint16 instead.)
                    data = np.random.normal(loc=32768, scale=2000,
                        size=det_daysamples)
                    data = np.clip(data, -32768, 32767).astype(np.int16)
                    det.write(detname, 0, data)
| 213 | + |
# Quick sanity check: reopen the volume and print its organization.
with tidas.Volume(path) as readback:
    readback.info()
| 218 | + |
0 commit comments