Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 126 additions & 0 deletions colmi_r02_client/big_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@

import logging
from dataclasses import dataclass
from typing import List
from colmi_r02_client.packet import make_packet

CMD_BIG_DATA = 188
BIG_DATA_SLEEP = 39

logger = logging.getLogger(__name__)


def read_sleep_bigdata_packet() -> bytearray:
    """
    Build the BigData request packet that asks the ring for sleep data.

    Wire layout (little endian), per the BigDataRequest struct:
        uint8_t  bigDataMagic = 188  (command byte, supplied via make_packet)
        uint8_t  dataId       = 39   (sleep)
        uint16_t dataLen      = 0    (request carries no payload)
        uint16_t crc16        = 0xFFFF
    """
    sub_packet = bytearray()
    sub_packet.append(BIG_DATA_SLEEP)              # dataId = 39
    sub_packet += (0).to_bytes(2, "little")        # dataLen = 0
    sub_packet += (0xFFFF).to_bytes(2, "little")   # crc16 = 0xFFFF
    return make_packet(CMD_BIG_DATA, sub_packet)


def parse_bigdata_response(packet: bytearray):
    """
    Dispatch a BigData response to the parser for its dataId.

    Header layout (BigDataResponse):
        uint8_t  bigDataMagic = 188
        uint8_t  dataId
        uint16_t dataLen
        uint16_t crc16
        ... variable-length data follows

    Returns the parsed result, or None for malformed or unhandled packets.
    """
    # Need at least the 6 byte header plus one payload byte, and the magic
    # command byte must match.
    if len(packet) < 7 or packet[0] != CMD_BIG_DATA:
        logger.warning("Invalid BigData packet")
        return None
    if packet[1] != BIG_DATA_SLEEP:
        logger.warning(f"Unhandled BigData packet dataId: {packet[1]}")
        return None
    return parse_bigdata_sleep_response(packet)


# Dataclasses for sleep data
@dataclass
class SleepPeriod:
    """One contiguous stretch of a single sleep stage."""

    # Raw sleep stage value from the ring:
    # 0=NODATA, 1=ERROR, 2=LIGHT, 3=DEEP, 4=REM, 5=AWAKE
    type: int
    # Duration of this period in minutes
    minutes: int

@dataclass
class SleepDay:
    """Sleep summary for one night as reported by the ring."""

    # How many days before the current date this record covers
    daysAgo: int
    # Sleep start, in minutes after midnight (int16 on the wire)
    sleepStart: int
    # Sleep end, in minutes after midnight (int16 on the wire)
    sleepEnd: int
    # Ordered sleep stage periods making up the night
    periods: List[SleepPeriod]

def parse_bigdata_sleep_response(packet: bytearray) -> List[SleepDay] | None:
    """
    Parse a BigData sleep response into a list of SleepDay records.

    Wire layout (little endian):

        struct SleepData {
            uint8_t bigDataMagic = 188;
            uint8_t sleepId = 39;
            uint16_t dataLen;
            uint16_t crc16;
            uint8_t sleepDays;
            SleepDay days[];
        }
        struct SleepDay {
            uint8_t daysAgo;
            uint8_t curDayBytes;   // size of this record after the daysAgo field
            int16_t sleepStart;    // Minutes after midnight
            int16_t sleepEnd;      // Minutes after midnight
            SleepPeriod sleepPeriods[];
        }
        struct SleepPeriod {
            SleepType type;
            uint8_t minutes;
        }
        enum SleepType : uint8_t {
            NODATA = 0,
            ERROR = 1,
            LIGHT = 2,
            DEEP = 3,
            REM = 4,
            AWAKE = 5,
        }

    Returns None for a malformed header. Truncated day/period data is
    logged and the days parsed so far are returned.
    """
    if len(packet) < 7 or packet[0] != CMD_BIG_DATA or packet[1] != BIG_DATA_SLEEP:
        logger.warning("Invalid BigData sleep packet")
        return None
    data_len = int.from_bytes(packet[2:4], 'little')
    # NOTE(review): crc16 is decoded but not verified against the payload yet
    crc16 = int.from_bytes(packet[4:6], 'little')
    sleep_days = packet[6]
    idx = 7  # first SleepDay record starts right after the day count
    days: List[SleepDay] = []
    for _ in range(sleep_days):
        # need at least the fixed 6-byte SleepDay prefix
        if idx + 6 > len(packet):
            logger.warning("Packet too short for another SleepDay")
            break
        daysAgo = packet[idx]
        curDayBytes = packet[idx+1]
        sleepStart = int.from_bytes(packet[idx+2:idx+4], 'little', signed=True)
        sleepEnd = int.from_bytes(packet[idx+4:idx+6], 'little', signed=True)
        periods: List[SleepPeriod] = []
        period_idx = idx+6
        # periods run until the end of this day's record, i.e. the daysAgo
        # byte plus curDayBytes further bytes
        while period_idx < idx+1+curDayBytes:
            if period_idx+2 > len(packet):
                break
            sleep_type = packet[period_idx]
            minutes = packet[period_idx+1]
            periods.append(SleepPeriod(type=sleep_type, minutes=minutes))
            period_idx += 2
        days.append(SleepDay(
            daysAgo=daysAgo,
            sleepStart=sleepStart,
            sleepEnd=sleepEnd,
            periods=periods
        ))
        idx += 1 + curDayBytes
    return days
86 changes: 81 additions & 5 deletions colmi_r02_client/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,16 @@
from bleak import BleakClient
from bleak.backends.characteristic import BleakGATTCharacteristic

from colmi_r02_client import battery, date_utils, steps, set_time, blink_twice, hr, hr_settings, packet, reboot, real_time
from colmi_r02_client import battery, date_utils, steps, set_time, blink_twice, hr, hr_settings, packet, reboot, real_time, big_data

UART_SERVICE_UUID = "6E40FFF0-B5A3-F393-E0A9-E50E24DCCA9E"
UART_RX_CHAR_UUID = "6E400002-B5A3-F393-E0A9-E50E24DCCA9E"
UART_TX_CHAR_UUID = "6E400003-B5A3-F393-E0A9-E50E24DCCA9E"

UART_SERVICE_V2_UUID = "DE5BF728-D711-4E47-AF26-65E3012A5DC7"
UART_RX_CHAR_V2_UUID = "DE5BF72A-D711-4E47-AF26-65E3012A5DC7"
UART_TX_CHAR_V2_UUID = "DE5BF729-D711-4E47-AF26-65E3012A5DC7"
Copy link
Copy Markdown
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Gonna have to think about this — it's a bit more complex than I'd like.


DEVICE_INFO_UUID = "0000180A-0000-1000-8000-00805F9B34FB"
DEVICE_HW_UUID = "00002A27-0000-1000-8000-00805F9B34FB"
DEVICE_FW_UUID = "00002A26-0000-1000-8000-00805F9B34FB"
Expand All @@ -38,6 +42,7 @@ class FullData:
address: str
heart_rates: list[hr.HeartRateLog | hr.NoData]
sport_details: list[list[steps.SportDetail] | steps.NoData]
sleep_logs: list[big_data.SleepDay | None]


COMMAND_HANDLERS: dict[int, Callable[[bytearray], Any]] = {
Expand All @@ -48,6 +53,7 @@ class FullData:
hr.CMD_READ_HEART_RATE: hr.HeartRateLogParser().parse,
set_time.CMD_SET_TIME: empty_parse,
hr_settings.CMD_HEART_RATE_LOG_SETTINGS: hr_settings.parse_heart_rate_log_settings,
big_data.CMD_BIG_DATA: big_data.parse_bigdata_response,
}
"""
TODO put these somewhere nice
Expand Down Expand Up @@ -92,9 +98,62 @@ async def connect(self):
rx_char = nrf_uart_service.get_characteristic(UART_RX_CHAR_UUID)
assert rx_char
self.rx_char = rx_char

await self.bleak_client.start_notify(UART_TX_CHAR_UUID, self._handle_tx)

nrf_uart_service_v2 = self.bleak_client.services.get_service(UART_SERVICE_V2_UUID)
assert nrf_uart_service_v2
rx_char_v2 = nrf_uart_service_v2.get_characteristic(UART_RX_CHAR_V2_UUID)
assert rx_char_v2
self.rx_char_v2 = rx_char_v2

# Buffer for big data packets
self._bigdata_buffer = None
self._bigdata_expected_size = None
await self.bleak_client.start_notify(UART_TX_CHAR_V2_UUID, self._handle_tx_v2)

def _handle_tx_v2(self, _: BleakGATTCharacteristic, packet: bytearray) -> None:
    """
    Notification handler for the V2 ("big data") characteristic.

    Big data responses can span multiple BLE notifications. Fragments are
    reassembled in self._bigdata_buffer until dataLen payload bytes plus
    the 6 byte header have arrived, then the complete packet is handed to
    the registered parser and the result is queued.
    """
    logger.info(f"Received V2 packet {packet}")

    if self._bigdata_buffer is not None:
        # Continuation of an in-flight big data packet. This MUST be
        # checked before the magic-byte dispatch below: fragments after
        # the first are raw payload and do not start with the 188 magic
        # byte, so they would otherwise be dropped as "unknown type".
        self._bigdata_buffer += packet
        if len(self._bigdata_buffer) < self._bigdata_expected_size + 6:
            return  # not enough data yet
        packet = self._bigdata_buffer
        self._bigdata_buffer = None
        self._bigdata_expected_size = None
        self._dispatch_bigdata(packet)
    elif packet[0] == big_data.CMD_BIG_DATA:
        if len(packet) < 6:
            logger.warning("BigData packet too short for header")
            return
        data_len = int.from_bytes(packet[2:4], 'little')
        if len(packet) < data_len + 6:
            # incomplete: start buffering until the rest arrives
            self._bigdata_buffer = bytearray(packet)
            self._bigdata_expected_size = data_len
            return
        self._dispatch_bigdata(packet)
    else:
        logger.warning(f"Received V2 packet with unknown type: {packet.hex()}")

    if self.record_to is not None:
        with self.record_to.open("ab") as f:
            f.write(packet)
            f.write(b"\n")

def _dispatch_bigdata(self, packet: bytearray) -> None:
    """Parse a fully reassembled big data packet and queue the result."""
    if big_data.CMD_BIG_DATA in COMMAND_HANDLERS:
        result = COMMAND_HANDLERS[big_data.CMD_BIG_DATA](packet)
        if result is not None:
            self.queues[big_data.CMD_BIG_DATA].put_nowait(result)
        else:
            logger.debug("No result returned from parser for big data")
    else:
        logger.warning("No handler for big data packet")

async def disconnect(self):
await self.bleak_client.disconnect()

Expand All @@ -121,9 +180,12 @@ def _handle_tx(self, _: BleakGATTCharacteristic, packet: bytearray) -> None:
f.write(packet)
f.write(b"\n")

async def send_packet(self, packet: bytearray) -> None:
async def send_packet(self, packet: bytearray, is_v2: bool = False) -> None:
    """
    Write a fully framed packet to the ring.

    :param packet: bytes to write
    :param is_v2: when True, send on the V2 big-data RX characteristic
        instead of the classic UART RX characteristic
    """
    logger.debug(f"Sending packet: {packet}")
    target_char = self.rx_char_v2 if is_v2 else self.rx_char
    await self.bleak_client.write_gatt_char(target_char, packet, response=False)

async def get_battery(self) -> battery.BatteryInfo:
await self.send_packet(battery.BATTERY_PACKET)
Expand Down Expand Up @@ -228,6 +290,16 @@ async def get_steps(self, target: datetime, today: datetime | None = None) -> li
timeout=2,
)

async def get_sleep(self):
    """
    Request sleep data over the big data protocol and await the parsed result.

    The big data protocol does not appear to accept a date parameter; the
    ring instead reports relative "daysAgo" offsets from the current date.
    """
    await self.send_packet(big_data.read_sleep_bigdata_packet(), is_v2=True)
    return await asyncio.wait_for(
        self.queues[big_data.CMD_BIG_DATA].get(),
        timeout=2,
    )

async def reboot(self) -> None:
await self.send_packet(reboot.REBOOT_PACKET)

Expand All @@ -252,8 +324,12 @@ async def get_full_data(self, start: datetime, end: datetime) -> FullData:
"""
heart_rate_logs = []
sport_detail_logs = []
sleep_logs = []
for d in date_utils.dates_between(start, end):
heart_rate_logs.append(await self.get_heart_rate_log(d))
sport_detail_logs.append(await self.get_steps(d))

if sleep_data := await self.get_sleep():
sleep_logs.extend(sleep_data)

return FullData(self.address, heart_rates=heart_rate_logs, sport_details=sport_detail_logs)
return FullData(self.address, heart_rates=heart_rate_logs, sport_details=sport_detail_logs, sleep_logs=sleep_logs)
55 changes: 52 additions & 3 deletions colmi_r02_client/db.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
from datetime import datetime, timezone
from datetime import datetime, timezone, timedelta
from pathlib import Path
import logging
import json
from typing import Any

from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, Session, relationship
from sqlalchemy import select, UniqueConstraint, ForeignKey, create_engine, event, func, types
from sqlalchemy.engine import Engine, Dialect

from colmi_r02_client import hr, steps
from colmi_r02_client import hr, steps, big_data
from colmi_r02_client.client import FullData
from colmi_r02_client.date_utils import start_of_day, end_of_day
from colmi_r02_client.date_utils import start_of_day, end_of_day, now

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -63,6 +64,7 @@ class Ring(Base):
heart_rates: Mapped[list["HeartRate"]] = relationship(back_populates="ring")
sport_details: Mapped[list["SportDetail"]] = relationship(back_populates="ring")
syncs: Mapped[list["Sync"]] = relationship(back_populates="ring")
sleep_logs: Mapped[list["SleepLog"]] = relationship(back_populates="ring")


class Sync(Base):
Expand All @@ -74,6 +76,20 @@ class Sync(Base):
ring: Mapped["Ring"] = relationship(back_populates="syncs")
heart_rates: Mapped[list["HeartRate"]] = relationship(back_populates="sync")
sport_details: Mapped[list["SportDetail"]] = relationship(back_populates="sync")
sleep_logs: Mapped[list["SleepLog"]] = relationship(back_populates="sync")

class SleepLog(Base):
    """One night of sleep for a ring, keyed by (ring, date)."""

    __tablename__ = "sleep_logs"
    # one sleep log per ring per date; re-syncing the same night updates the row
    __table_args__ = (UniqueConstraint("ring_id", "date"),)
    sleep_log_id: Mapped[int] = mapped_column(primary_key=True)
    # start-of-day timestamp of the night this log covers, stored in UTC
    date = mapped_column(DateTimeInUTC(timezone=True), nullable=False)
    ring_id = mapped_column(ForeignKey("rings.ring_id"), nullable=False)
    ring: Mapped["Ring"] = relationship(back_populates="sleep_logs")
    sync_id = mapped_column(ForeignKey("syncs.sync_id"), nullable=False)
    sync: Mapped["Sync"] = relationship(back_populates="sleep_logs")
    # sleep start/end in minutes after midnight (see big_data.SleepDay)
    sleep_start: Mapped[int]
    sleep_end: Mapped[int]
    periods: Mapped[str]  # JSON string of sleep periods


class HeartRate(Base):
Expand Down Expand Up @@ -152,6 +168,7 @@ def full_sync(session: Session, data: FullData) -> None:

_add_heart_rate(sync, ring, data, session)
_add_sport_details(sync, ring, data, session)
_add_sleep_logs(sync, ring, data, session)
session.commit()


Expand Down Expand Up @@ -221,5 +238,37 @@ def _add_sport_details(sync: Sync, ring: Ring, data: FullData, session: Session)
session.add(s)


def _add_sleep_logs(sync: Sync, ring: Ring, data: FullData, session: Session) -> None:
    """
    Upsert the sleep logs from a sync into the database.

    Each SleepDay is keyed by (ring, date), where the date is derived from
    the record's daysAgo offset relative to the current time. An existing
    row for that date is overwritten in place; otherwise a new row is added.
    """
    if not hasattr(data, "sleep_logs") or not data.sleep_logs:
        return
    logger.info(f"Adding {len(data.sleep_logs)} days of sleep logs")
    for day in data.sleep_logs:
        if not day or not isinstance(day, big_data.SleepDay):
            continue
        # daysAgo is relative to "now"; normalize to the start of that day
        log_date = (now() - timedelta(days=day.daysAgo)).replace(hour=0, minute=0, second=0, microsecond=0)
        # serialize periods as JSON for storage in a text column
        serialized_periods = json.dumps([{'type': p.type, 'minutes': p.minutes} for p in day.periods])
        row = session.scalars(
            select(SleepLog).where(SleepLog.ring_id == ring.ring_id, SleepLog.date == log_date)
        ).one_or_none()
        if row is None:
            row = SleepLog(
                date=log_date,
                ring=ring,
                sync=sync,
                sleep_start=day.sleepStart,
                sleep_end=day.sleepEnd,
                periods=serialized_periods,
            )
        else:
            row.sleep_start = day.sleepStart
            row.sleep_end = day.sleepEnd
            row.periods = serialized_periods
        session.add(row)


def get_last_sync(session: Session, ring_address: str) -> datetime | None:
    """Return the timestamp of the most recent sync for the ring at *ring_address*, or None."""
    query = select(func.max(Sync.timestamp)).join(Ring).where(Ring.address == ring_address)
    return session.scalars(query).one_or_none()
14 changes: 14 additions & 0 deletions tests/database_schema.sql
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,20 @@ CREATE TABLE syncs (
FOREIGN KEY(ring_id) REFERENCES rings (ring_id)
)

-- Sleep data synced via the "big data" protocol; one row per ring per night.
-- NOTE(review): presumably this fixture must match the schema emitted by
-- db.py's SleepLog model — keep the two in sync.
CREATE TABLE sleep_logs (
	sleep_log_id INTEGER NOT NULL,
	date DATETIME NOT NULL,
	ring_id INTEGER NOT NULL,
	sync_id INTEGER NOT NULL,
	sleep_start INTEGER NOT NULL,
	sleep_end INTEGER NOT NULL,
	periods VARCHAR NOT NULL,
	PRIMARY KEY (sleep_log_id),
	UNIQUE (ring_id, date),
	FOREIGN KEY(ring_id) REFERENCES rings (ring_id),
	FOREIGN KEY(sync_id) REFERENCES syncs (sync_id)
)

CREATE TABLE heart_rates (
heart_rate_id INTEGER NOT NULL,
reading INTEGER NOT NULL,
Expand Down
Loading