Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -219,3 +219,4 @@ daq_data/logs

# data simulation
daq_data/simulated_data_dir/module_*
loki-data/
43 changes: 43 additions & 0 deletions protos/telemetry.proto
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,52 @@ import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";

// Central telemetry collection service.
service Telemetry {
// Ship one structured log record (see LogMessage) to the collector.
// NOTE(review): this RPC returns StatusResponse, yet a LogResponse
// message is declared below and never used — confirm which response
// type is intended before clients depend on the shape.
rpc Log (LogMessage) returns (StatusResponse) {}
// Report the current status of a device identified in StatusRequest.
rpc ReportStatus(StatusRequest) returns (StatusResponse) {}
}

// One structured log record emitted by a PANOSETI service.
message LogMessage {
// --- Indexing ---
// Hostname of the machine that produced the record.
string host = 1;
// Logical name of the emitting service/process.
string service_name = 2;

// --- Metadata ---
// Wall-clock time the record was created.
google.protobuf.Timestamp timestamp = 3;
// Severity level; intended to carry the Severity enum values declared
// below (1=DEBUG ... 5=CRITICAL).
// NOTE(review): these are NOT Python logging's numeric levels
// (logging.DEBUG == 10, logging.INFO == 20, ...) — senders must map
// Python levels onto this scale; confirm the mapping at the call site.
int32 severity = 4;

// --- Context ---
// Source file, line, and function that emitted the record.
string file_path = 5;
uint32 line_number = 6;
string function_name = 7;

// --- Runtime Metadata ---
// OS process id and thread name of the emitter.
int64 process_id = 8;
string thread_name = 9;

// --- Software Versioning (NEW) ---
// Git commit hash and branch of the running software build.
string git_commit = 10;
string git_branch = 11;

// --- Payload ---
// Arbitrary structured payload, serialized as a JSON string.
string payload_json = 12;
}

// Acknowledgement for a Log RPC.
// NOTE(review): currently UNUSED — the Log rpc above is declared to
// return StatusResponse instead. Either switch the rpc to return this
// message or delete it to avoid confusion.
message LogResponse {
// True when the record was accepted by the collector.
bool success = 1;
// Human-readable detail (e.g. an error description on failure).
string message = 2;
}

// Log severity scale for LogMessage.severity.
// NOTE(review): LogMessage.severity is typed as int32 rather than this
// enum; int32 and enum are wire-compatible varints, so tightening the
// field type to Severity would be backward-compatible — consider it.
enum Severity {
// proto3 requires a zero value; means "severity not set".
SEVERITY_UNSPECIFIED = 0;
DEBUG = 1;
INFO = 2;
WARNING = 3;
ERROR = 4;
CRITICAL = 5;
}


message StatusRequest {
string device_type = 1; // e.g. "gnss", "dew", "test"
string device_id = 2; // e.g. "dome_a"
Expand Down
8 changes: 5 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "panoseti-grpc"
version = "0.3.0"
version = "0.3.2"
description = "gRPC for the PANOSETI project."
readme = "README.md"
requires-python = ">=3.9"
Expand Down Expand Up @@ -48,8 +48,10 @@ dependencies = [
"watchfiles",
"aiofiles",
"pygnssutils",
"pydantic",
"tomli"
"pydantic>=2.12",
"tomli",
"requests",
"GitPython"
]

[project.optional-dependencies]
Expand Down
2 changes: 1 addition & 1 deletion scripts/run-ci-tests/run-telemetry-ci-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,6 @@ docker compose -f $COMPOSE_FILE up \

echo "--- Cleaning Up ---"
# Ensure all containers and networks are removed
docker compose -f $COMPOSE_FILE down
docker compose -f $COMPOSE_FILE down --volumes

echo "--- Telemetry CI Run Completed Successfully ---"
42 changes: 21 additions & 21 deletions src/panoseti_grpc/daq_data/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ See [client.py](daq_data/client.py) for the implementation and [daq_data_client_
3. Follow the code patterns provided in [daq_data_client_demo.ipynb](daq_data_client_demo.ipynb) to stream images from the DAQ nodes to your visualization program.

```python
from daq_data.client import DaqDataClient
from daq_data.plot import PanoImagePreviewer
from panoseti_grpc.daq_data.client import DaqDataClient
from panoseti_grpc.daq_data.plot import PanoImagePreviewer

# 0. Specify configuration file paths
daq_config_path = 'path/to/your/daq_config.json'
Expand Down Expand Up @@ -57,7 +57,7 @@ This information is given by [daq_config.json](https://github.com/panoseti/panos
Note that the client should always be used as a [context manager](https://book.pythontips.com/en/latest/context_managers.html) to ensure network resources are handled correctly.

```python
from daq_data.client import DaqDataClient
from panoseti_grpc.daq_data.client import DaqDataClient

# Instantiate the client using a 'with' statement
with DaqDataClient(daq_config_path, network_config_path) as client:
Expand Down Expand Up @@ -243,7 +243,7 @@ returns `StreamImagesResponse.PanoImage` as a Python dictionary with the followi
This example demonstrates a complete workflow: initialize the server for a simulated run and then stream data from it. This pattern is shown in [daq_data_client_demo.ipynb](daq_data_client_demo.ipynb).

```python
from daq_data.client import DaqDataClient
from panoseti_grpc.daq_data.client import DaqDataClient

# 0. Specify configuration file paths
daq_config_path = 'daq_data/config/daq_config_grpc_simulate.json'
Expand Down Expand Up @@ -300,7 +300,7 @@ This example demonstrates how to use the AioDaqDataClient to initialize a simula

```python
import asyncio
from daq_data.client import AioDaqDataClient
from panoseti_grpc.daq_data.client import AioDaqDataClient

async def main():
# 0. Specify configuration file paths
Expand Down Expand Up @@ -355,7 +355,7 @@ When a `stop_event` (an `asyncio.Event` object) is passed to the client's constr
```python
import asyncio
import signal
from daq_data.client import AioDaqDataClient
from panoseti_grpc.daq_data.client import AioDaqDataClient

async def main():
# 1. Create a shutdown event
Expand Down Expand Up @@ -443,22 +443,22 @@ Below is an example workflow for using `daq_data/client_cli.py` to view real-tim
#### On each DAQ Node in `/path/to/daq_config.json`
1. Set up the `grpc-py39` environment as described above.
2. Set the working directory to `panoseti_grpc/`.
3. Run `python -m daq_data.server`.
3. Run `python -m panoseti_grpc.daq_data.server`.

#### On Any Computer
1. Update `hp_io_config.json` or create a new one (see docs below).
2. Set your working directory to `panoseti_grpc/`.
3. Set up the `grpc-py39` environment as described above and activate it.
4. `export DAQ_CFG=/path/to/daq_config.json`: (optional) create a convenient variable for `/path/to/daq_config.json`. If you don't want to do this, replace `$DAQ_CFG` in all following commands with `/path/to/daq_config.json`.
5. `export NET_CFG=/path/to/network_config.json`: (optional) create a convenient variable for `/path/to/network_config.json`. If you don't want to do this, replace `$NET_CFG` in all following commands with `/path/to/network_config.json`.
6. `python -m daq_data.cli -h`: see the available options.
7. `python -m daq_data.cli $DAQ_CFG $NET_CFG --list-hosts`: find DAQ node hosts running valid DaqData gRPC servers. Hostname arguments `H` to `--host` should be in the list of valid hosts returned by this command.
6. `python -m panoseti_grpc.daq_data.cli -h`: see the available options.
7. `python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --list-hosts`: find DAQ node hosts running valid DaqData gRPC servers. Hostname arguments `H` to `--host` should be in the list of valid hosts returned by this command.
8. Initialize the `hp_io` thread on all DaqData servers:
- (Real data) `python -m daq_data.cli $DAQ_CFG $NET_CFG --init /path/to/hp_io_config.json`: initialize `hp_io` from `hp_io_config.json`. See [The hp_io_config.json File](#the-hp_io_configjson-file) for details about this config file.
- (Simulated data) `python -m daq_data.cli $DAQ_CFG $NET_CFG --init-sim`: initialize `hp_io` from `daq_data/config/hp_io_config_simulate.json`. This starts a stream of simulated data.
- (Real data) `python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --init /path/to/hp_io_config.json`: initialize `hp_io` from `hp_io_config.json`. See [The hp_io_config.json File](#the-hp_io_configjson-file) for details about this config file.
- (Simulated data) `python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --init-sim`: initialize `hp_io` from `daq_data/config/hp_io_config_simulate.json`. This starts a stream of simulated data.
9. Start visualization apps:
- `python -m daq_data.cli $DAQ_CFG $NET_CFG --plot-phdist`: make a `StreamImages` request and launch a real-time pulse-height distribution app.
- `python -m daq_data.cli $DAQ_CFG $NET_CFG --plot-view`: make a `StreamImages` request and launch a real-time frame viewer app.
- `python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --plot-phdist`: make a `StreamImages` request and launch a real-time pulse-height distribution app.
- `python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --plot-view`: make a `StreamImages` request and launch a real-time frame viewer app.

Commands organized below for convenience:
```bash
Expand All @@ -470,25 +470,25 @@ export DAQ_CFG=/path/to/daq_config.json
export NET_CFG=/path/to/network_config.json

# 6. see available options
python -m daq_data.cli -h
python -m panoseti_grpc.daq_data.cli -h

# 7. check gRPC server status
python -m daq_data.cli $DAQ_CFG $NET_CFG --list-hosts
python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --list-hosts

# 8. Initialize the hp_io thread on all DaqData servers (choose one)
python -m daq_data.cli $DAQ_CFG $NET_CFG --init /path/to/hp_io_config.json # real run
python -m daq_data.cli $DAQ_CFG $NET_CFG --init-sim # simulated run
python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --init /path/to/hp_io_config.json # real run
python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --init-sim # simulated run

# 9. Start visualization apps (choose one)
python -m daq_data.cli $DAQ_CFG $NET_CFG --plot-phdist # pulse-height distribution
python -m daq_data.cli $DAQ_CFG $NET_CFG --plot-view # frame viewer
python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --plot-phdist # pulse-height distribution
python -m panoseti_grpc.daq_data.cli $DAQ_CFG $NET_CFG --plot-view # frame viewer
```


Notes:
- On Linux, the `Ctrl+P` keyboard shortcut loads commands from your command history. Useful for running the `python -m daq_data.cli` module with different options.
- On Linux, the `Ctrl+P` keyboard shortcut loads commands from your command history. Useful for running the `python -m panoseti_grpc.daq_data.cli` module with different options.
- `panoseti_grpc` has a package structure, so your working directory should be the repo root, `panoseti_grpc/`, when running modules in `panoseti_grpc/daq_data/`.
- Each script (e.g. `server.py`) should be prefixed with **`python -m daq_data.`** and, because it is a module, be called without the `.py` extension. Following these guidelines gives the example command: **`python -m daq_data.server`**, instead of `daq_data/server.py` or `python -m daq_data.server.py`.
- Each script (e.g. `server.py`) should be prefixed with **`python -m panoseti_grpc.daq_data.`** and, because it is a module, be called without the `.py` extension. Following these guidelines gives the example command: **`python -m panoseti_grpc.daq_data.server`**, instead of `daq_data/server.py` or `python -m panoseti_grpc.daq_data.server.py`.

# The DaqData Service
See [daq_data.proto](protos/daq_data.proto) for the protobuf specification of this service.
Expand Down
32 changes: 19 additions & 13 deletions src/panoseti_grpc/generated/telemetry_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

43 changes: 43 additions & 0 deletions src/panoseti_grpc/generated/telemetry_pb2_grpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,11 @@ def __init__(self, channel):
Args:
channel: A grpc.Channel.
"""
self.Log = channel.unary_unary(
'/panoseti.telemetry.Telemetry/Log',
request_serializer=telemetry__pb2.LogMessage.SerializeToString,
response_deserializer=telemetry__pb2.StatusResponse.FromString,
_registered_method=True)
self.ReportStatus = channel.unary_unary(
'/panoseti.telemetry.Telemetry/ReportStatus',
request_serializer=telemetry__pb2.StatusRequest.SerializeToString,
Expand All @@ -44,6 +49,12 @@ def __init__(self, channel):
class TelemetryServicer(object):
"""Missing associated documentation comment in .proto file."""

def Log(self, request, context):
    # Auto-generated (grpc_tools.protoc) default servicer stub: a real
    # server subclasses TelemetryServicer and overrides Log. This base
    # implementation rejects the call with UNIMPLEMENTED. Do not edit by
    # hand — regenerate from protos/telemetry.proto instead.
    """Missing associated documentation comment in .proto file."""
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

def ReportStatus(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
Expand All @@ -53,6 +64,11 @@ def ReportStatus(self, request, context):

def add_TelemetryServicer_to_server(servicer, server):
rpc_method_handlers = {
'Log': grpc.unary_unary_rpc_method_handler(
servicer.Log,
request_deserializer=telemetry__pb2.LogMessage.FromString,
response_serializer=telemetry__pb2.StatusResponse.SerializeToString,
),
'ReportStatus': grpc.unary_unary_rpc_method_handler(
servicer.ReportStatus,
request_deserializer=telemetry__pb2.StatusRequest.FromString,
Expand All @@ -69,6 +85,33 @@ def add_TelemetryServicer_to_server(servicer, server):
class Telemetry(object):
"""Missing associated documentation comment in .proto file."""

@staticmethod
# Auto-generated experimental convenience API: performs a one-shot
# unary-unary /panoseti.telemetry.Telemetry/Log call against `target`
# without the caller constructing a channel/stub. Serializes a
# LogMessage request and deserializes a StatusResponse (matches the
# service definition in telemetry.proto). Do not edit by hand —
# regenerate from the .proto instead.
def Log(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    return grpc.experimental.unary_unary(
        request,
        target,
        '/panoseti.telemetry.Telemetry/Log',
        telemetry__pb2.LogMessage.SerializeToString,
        telemetry__pb2.StatusResponse.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata,
        _registered_method=True)

@staticmethod
def ReportStatus(request,
target,
Expand Down
2 changes: 1 addition & 1 deletion src/panoseti_grpc/panoseti_util/control_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -458,7 +458,7 @@ def get_quabo_ip_port(ip_addr, i, network_config):
break
return ip_ports

# attach port forwarding info to daq config based on network_config
# attach port forwarding info to daq config based on network_config
def attach_daq_config(daq_config, network_config):
for i in range(len(daq_config['daq_nodes'])):
daq = daq_config['daq_nodes'][i]
Expand Down
Loading