Skip to content

Commit 3c5b45c

Browse files
Ken LippoldKen Lippold
authored and committed
API updates
1 parent ecc59f7 commit 3c5b45c

File tree

15 files changed

+221
-179
lines changed

15 files changed

+221
-179
lines changed

iam/models/workspace.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def can_user_create(cls, user: Optional["User"]):
6464
return user.account_type != "limited"
6565

6666
def get_user_permissions(self, user: Optional["User"]) -> list[Literal["edit", "delete", "view"]]:
67-
if user == self.owner or user.account_type == "admin":
67+
if user and (user == self.owner or user.account_type == "admin"):
6868
return ["view", "edit", "delete"]
6969
elif self.is_private is False or self.collaborators.filter(user=user).exists():
7070
return ["view"]

sta/schemas/sensor.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
import uuid
22
from typing import Optional
33
from ninja import Schema, Field
4+
from sta.schemas.sensorthings.sensor import sensorEncodingTypes
45
from hydroserver.schemas import BaseGetResponse, BasePostBody, BasePatchBody
56

67

78
class SensorFields(Schema):
89
name: str = Field(..., max_length=255)
910
description: str
10-
encoding_type: str = Field(..., max_length=255)
11+
encoding_type: sensorEncodingTypes = Field(..., max_length=255)
1112
manufacturer: Optional[str] = Field(None, max_length=255)
1213
sensor_model: Optional[str] = Field(None, max_length=255, alias="model")
1314
sensor_model_link: Optional[str] = Field(None, max_length=500, alias="modelLink")

sta/schemas/sensorthings/datastream.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from pydantic import ConfigDict
33
from pydantic.alias_generators import to_camel
44
from typing import Optional, Literal
5+
from uuid import UUID
56
from sensorthings.components.datastreams.schemas import (DatastreamGetResponse as DefaultDatastreamGetResponse,
67
DatastreamListResponse as DefaultDatastreamListResponse)
78
from .workspace import WorkspaceProperties
@@ -14,11 +15,15 @@ class DatastreamProperties(Schema):
1415
value_count: Optional[int] = None
1516
no_data_value: float
1617
processing_level_code: str
18+
processing_level_id: UUID
19+
unit_id: UUID
1720
intended_time_spacing: Optional[float] = None
1821
intended_time_spacing_unit_of_measurement: Optional[Literal["seconds", "minutes", "hours", "days"]] = None
1922
aggregation_statistic: Optional[str] = None
2023
time_aggregation_interval: float
2124
time_aggregation_interval_unit_of_measurement: Literal["seconds", "minutes", "hours", "days"]
25+
is_private: bool
26+
is_visible: bool
2227
workspace: WorkspaceProperties
2328

2429
model_config = ConfigDict(populate_by_name=True, str_strip_whitespace=True, alias_generator=to_camel)

sta/schemas/sensorthings/location.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
from pydantic import ConfigDict
33
from pydantic.alias_generators import to_camel
44
from typing import Optional
5-
from datetime import datetime
65
from sensorthings.components.locations.schemas import (LocationGetResponse as DefaultLocationGetResponse,
76
LocationListResponse as DefaultLocationListResponse)
87
from .workspace import WorkspaceProperties
@@ -13,7 +12,6 @@ class LocationProperties(Schema):
1312
county: Optional[str] = None
1413
elevation_m: Optional[float] = Field(None, alias="elevation_m")
1514
elevation_datum: Optional[str] = None
16-
last_updated: Optional[datetime] = None
1715
workspace: WorkspaceProperties
1816

1917
model_config = ConfigDict(populate_by_name=True, str_strip_whitespace=True, alias_generator=to_camel)

sta/schemas/sensorthings/thing.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ class ThingProperties(Schema):
1515
data_disclaimer: Optional[str] = None
1616
is_private: bool
1717
workspace: WorkspaceProperties
18-
photos: list[AnyHttpUrlString]
18+
photos: dict[str, AnyHttpUrlString]
1919
tags: dict[str, str]
2020

2121
model_config = ConfigDict(populate_by_name=True, str_strip_whitespace=True, alias_generator=to_camel)

sta/schemas/thing.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
from country_list import countries_for_language
66
from hydroserver.schemas import BaseGetResponse, BasePostBody, BasePatchBody
77
from .tag import TagGetResponse
8+
from .photo import PhotoGetResponse
89

910
valid_country_codes = [code for code, _ in countries_for_language('en')]
1011

@@ -46,6 +47,7 @@ class ThingGetResponse(BaseGetResponse, ThingFields, LocationFields):
4647
id: uuid.UUID
4748
workspace_id: uuid.UUID
4849
tags: list[TagGetResponse]
50+
photos: list[PhotoGetResponse]
4951

5052

5153
class ThingPostBody(BasePostBody, ThingFields, LocationFields):

sta/services/sensorthings/datastream.py

Lines changed: 51 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from uuid import UUID
22
from typing import Optional
33
from ninja.errors import HttpError
4+
from django.db.utils import DataError, DatabaseError
45
from sta.models import Datastream
56
from sensorthings.components.datastreams.engine import DatastreamBaseEngine
67
from sensorthings.components.datastreams.schemas import (Datastream as DatastreamSchema, DatastreamPostBody,
@@ -60,26 +61,19 @@ def get_datastreams(
6061
count = None
6162

6263
if thing_ids:
63-
datastreams = self.apply_rank(
64-
component=DatastreamSchema,
64+
datastreams = self.apply_window(
6565
queryset=datastreams,
6666
partition_field="thing_id",
67-
filter_ids=thing_ids,
68-
max_records=1
6967
)
7068
elif sensor_ids:
71-
datastreams = self.apply_rank(
72-
component=DatastreamSchema,
69+
datastreams = self.apply_window(
7370
queryset=datastreams,
7471
partition_field="sensor_id",
75-
filter_ids=sensor_ids
7672
)
7773
elif observed_property_ids:
78-
datastreams = self.apply_rank(
79-
component=DatastreamSchema,
74+
datastreams = self.apply_window(
8075
queryset=datastreams,
8176
partition_field="observed_property_id",
82-
filter_ids=observed_property_ids
8377
)
8478
else:
8579
if pagination:
@@ -90,47 +84,54 @@ def get_datastreams(
9084
)
9185
datastreams = datastreams.all()
9286

93-
return {
94-
datastream.id: {
95-
"id": datastream.id,
96-
"name": str(datastream.name),
97-
"description": datastream.description,
98-
"thing_id": datastream.thing_id,
99-
"sensor_id": datastream.sensor_id,
100-
"observed_property_id": datastream.observed_property_id,
101-
"unit_of_measurement": {
102-
"name": datastream.unit.name,
103-
"symbol": datastream.unit.symbol,
104-
"definition": datastream.unit.definition.split(";")[0]
105-
},
106-
"observation_type": datastream.observation_type,
107-
"phenomenon_time": getattr(self, "iso_time_interval")(
108-
datastream.phenomenon_begin_time, datastream.phenomenon_end_time
109-
),
110-
"result_time": getattr(self, "iso_time_interval")(
111-
datastream.result_begin_time, datastream.result_end_time
112-
),
113-
"properties": {
114-
"result_type": datastream.result_type,
115-
"status": datastream.status,
116-
"sampled_medium": datastream.sampled_medium,
117-
"value_count": datastream.value_count,
118-
"no_data_value": datastream.no_data_value,
119-
"processing_level_code": datastream.processing_level.code,
120-
"intended_time_spacing": datastream.intended_time_spacing,
121-
"intended_time_spacing_unit_of_measurement": datastream.intended_time_spacing_unit,
122-
"aggregation_statistic": datastream.aggregation_statistic,
123-
"time_aggregation_interval": datastream.time_aggregation_interval,
124-
"time_aggregation_interval_unit_of_measurement": datastream.time_aggregation_interval_unit,
125-
"workspace": {
126-
"id": datastream.thing.workspace.id,
127-
"name": datastream.thing.workspace.name,
128-
"link": datastream.thing.workspace.link,
129-
"is_private": datastream.thing.workspace.is_private
87+
try:
88+
return {
89+
datastream.id: {
90+
"id": datastream.id,
91+
"name": str(datastream.name),
92+
"description": datastream.description,
93+
"thing_id": datastream.thing_id,
94+
"sensor_id": datastream.sensor_id,
95+
"observed_property_id": datastream.observed_property_id,
96+
"unit_of_measurement": {
97+
"name": datastream.unit.name,
98+
"symbol": datastream.unit.symbol,
99+
"definition": datastream.unit.definition.split(";")[0]
100+
},
101+
"observation_type": datastream.observation_type,
102+
"phenomenon_time": getattr(self, "iso_time_interval")(
103+
datastream.phenomenon_begin_time, datastream.phenomenon_end_time
104+
),
105+
"result_time": getattr(self, "iso_time_interval")(
106+
datastream.result_begin_time, datastream.result_end_time
107+
),
108+
"properties": {
109+
"result_type": datastream.result_type,
110+
"status": datastream.status,
111+
"sampled_medium": datastream.sampled_medium,
112+
"value_count": datastream.value_count,
113+
"no_data_value": datastream.no_data_value,
114+
"processing_level_code": datastream.processing_level.code,
115+
"processing_level_id": datastream.processing_level.id,
116+
"unit_id": datastream.unit.id,
117+
"intended_time_spacing": datastream.intended_time_spacing,
118+
"intended_time_spacing_unit_of_measurement": datastream.intended_time_spacing_unit,
119+
"aggregation_statistic": datastream.aggregation_statistic,
120+
"time_aggregation_interval": datastream.time_aggregation_interval,
121+
"time_aggregation_interval_unit_of_measurement": datastream.time_aggregation_interval_unit,
122+
"is_private": datastream.is_private,
123+
"is_visible": datastream.is_visible,
124+
"workspace": {
125+
"id": datastream.thing.workspace.id,
126+
"name": datastream.thing.workspace.name,
127+
"link": datastream.thing.workspace.link,
128+
"is_private": datastream.thing.workspace.is_private
129+
}
130130
}
131-
}
132-
} for datastream in datastreams
133-
}, count
131+
} for datastream in datastreams
132+
}, count
133+
except(DatabaseError, DataError,) as e:
134+
raise HttpError(400, str(e))
134135

135136
def create_datastream(
136137
self,

sta/services/sensorthings/location.py

Lines changed: 36 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from typing import Optional
22
from ninja.errors import HttpError
3+
from django.db.utils import DataError, DatabaseError
34
from sta.models import Location
45
from sensorthings.components.locations.engine import LocationBaseEngine
56
from sensorthings.components.locations.schemas import Location as LocationSchema, LocationPostBody, LocationPatchBody
@@ -62,38 +63,41 @@ def get_locations(
6263
skip=pagination.get("skip")
6364
)
6465

65-
return {
66-
location.id: {
67-
"id": location.id,
68-
"name": location.name,
69-
"description": location.description,
70-
"encoding_type": location.encoding_type,
71-
"location": {
72-
"type": "Feature",
73-
"properties": {},
74-
"geometry": {
75-
"type": "Point",
76-
"coordinates": [
77-
location.latitude,
78-
location.longitude
79-
]
80-
}
81-
},
82-
"properties": {
83-
"elevation_m": location.elevation_m,
84-
"elevation_datum": location.elevation_datum,
85-
"state": location.state,
86-
"county": location.county,
87-
"workspace": {
88-
"id": location.thing.workspace.id,
89-
"name": location.thing.workspace.name,
90-
"link": location.thing.workspace.link,
91-
"is_private": location.thing.workspace.is_private
92-
}
93-
},
94-
"thing_ids": [location.thing_id]
95-
} for location in locations
96-
}, count
66+
try:
67+
return {
68+
location.id: {
69+
"id": location.id,
70+
"name": location.name,
71+
"description": location.description,
72+
"encoding_type": location.encoding_type,
73+
"location": {
74+
"type": "Feature",
75+
"properties": {},
76+
"geometry": {
77+
"type": "Point",
78+
"coordinates": [
79+
location.latitude,
80+
location.longitude
81+
]
82+
}
83+
},
84+
"properties": {
85+
"elevation_m": location.elevation_m,
86+
"elevation_datum": location.elevation_datum,
87+
"state": location.state,
88+
"county": location.county,
89+
"workspace": {
90+
"id": location.thing.workspace.id,
91+
"name": location.thing.workspace.name,
92+
"link": location.thing.workspace.link,
93+
"is_private": location.thing.workspace.is_private
94+
}
95+
},
96+
"thing_ids": [location.thing_id]
97+
} for location in locations
98+
}, count
99+
except (DatabaseError, DataError,) as e:
100+
raise HttpError(400, str(e))
97101

98102
def create_location(
99103
self,

sta/services/sensorthings/observation.py

Lines changed: 25 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,8 @@
22
from uuid import UUID
33
from datetime import datetime
44
from typing import Optional
5-
from django.db.utils import IntegrityError
5+
from django.db.utils import IntegrityError, DatabaseError, DataError
6+
from psycopg.errors import UniqueViolation
67
from ninja.errors import HttpError
78
from sta.models import Observation, Datastream
89
from sensorthings.components.observations.engine import ObservationBaseEngine
@@ -102,25 +103,28 @@ def get_observations(
102103
# for result_qualifier in result_qualifiers
103104
# }
104105

105-
return {
106-
observation.id: {
107-
"id": observation.id,
108-
"phenomenon_time": str(observation.phenomenon_time),
109-
"result": observation.result,
110-
"result_time": str(observation.result_time) if observation.result_time else None,
111-
"datastream_id": observation.datastream_id,
112-
"result_quality": None
113-
# "result_quality": {
114-
# "quality_code": observation.quality_code,
115-
# "result_qualifiers": [
116-
# {
117-
# "code": result_qualifiers.get(result_qualifier).code,
118-
# "description": result_qualifiers.get(result_qualifier).description
119-
# } for result_qualifier in observation.result_qualifiers
120-
# ] if observation.result_qualifiers is not None else []
121-
# }
122-
} for observation in observations
123-
}, count
106+
try:
107+
return {
108+
observation.id: {
109+
"id": observation.id,
110+
"phenomenon_time": str(observation.phenomenon_time),
111+
"result": observation.result,
112+
"result_time": str(observation.result_time) if observation.result_time else None,
113+
"datastream_id": observation.datastream_id,
114+
"result_quality": None
115+
# "result_quality": {
116+
# "quality_code": observation.quality_code,
117+
# "result_qualifiers": [
118+
# {
119+
# "code": result_qualifiers.get(result_qualifier).code,
120+
# "description": result_qualifiers.get(result_qualifier).description
121+
# } for result_qualifier in observation.result_qualifiers
122+
# ] if observation.result_qualifiers is not None else []
123+
# }
124+
} for observation in observations
125+
}, count
126+
except (DatabaseError, DataError,) as e:
127+
raise HttpError(400, str(e))
124128

125129
def create_observation(
126130
self,
@@ -192,7 +196,7 @@ def create_observations(
192196
)
193197
for observation in datastream_observations
194198
])
195-
except IntegrityError:
199+
except (IntegrityError, UniqueViolation,):
196200
raise HttpError(409, "Duplicate phenomenonTime found on this datastream.")
197201

198202
new_observations.extend(new_observations_for_datastream)

0 commit comments

Comments (0)