Skip to content

Commit 2edc52f

Browse files
authored
Error type refactor (#137)
This PR replaces the central error type with smaller error types that compose upward into the final response error. Necessary adjustments are made alongside. Fixes #81
1 parent 917a4ec commit 2edc52f

File tree

13 files changed

+415
-413
lines changed

13 files changed

+415
-413
lines changed

server/src/alerts.rs

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,6 @@
1919
use log::{error, info};
2020
use serde::{Deserialize, Serialize};
2121

22-
use crate::error::Error;
23-
2422
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
2523
#[serde(rename_all = "camelCase")]
2624
pub struct Alerts {
@@ -39,7 +37,7 @@ pub struct Alert {
3937
impl Alert {
4038
// TODO: spawn async tasks to call webhooks if alert rules are met
4139
// This is done to ensure that threads aren't blocked by calls to the webhook
42-
pub async fn check_alert(&mut self, event: &serde_json::Value) -> Result<(), Error> {
40+
pub async fn check_alert(&mut self, event: &serde_json::Value) -> Result<(), ()> {
4341
if self.rule.resolves(event).await {
4442
info!("Alert triggered; name: {}", self.name);
4543
for target in self.targets.clone() {

server/src/error.rs

Lines changed: 0 additions & 84 deletions
This file was deleted.

server/src/event.rs

Lines changed: 53 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
*/
1919
use datafusion::arrow;
2020
use datafusion::arrow::datatypes::Schema;
21+
use datafusion::arrow::error::ArrowError;
2122
use datafusion::arrow::ipc::writer::StreamWriter;
2223
use datafusion::arrow::json;
2324
use datafusion::arrow::json::reader::infer_json_schema;
@@ -34,9 +35,9 @@ use std::sync::RwLock;
3435

3536
use crate::metadata;
3637
use crate::option::CONFIG;
37-
use crate::response;
3838
use crate::storage::ObjectStorage;
39-
use crate::Error;
39+
40+
use self::error::EventError;
4041

4142
type LocalWriter = Mutex<Option<StreamWriter<std::fs::File>>>;
4243
type LocalWriterGuard<'a> = MutexGuard<'a, Option<StreamWriter<std::fs::File>>>;
@@ -51,13 +52,13 @@ impl STREAM_WRITERS {
5152
fn append_to_local(stream: &str, record: &RecordBatch) -> Result<(), StreamWriterError> {
5253
let hashmap_guard = STREAM_WRITERS
5354
.read()
54-
.map_err(|_| StreamWriterError::RwPoisioned)?;
55+
.map_err(|_| StreamWriterError::RwPoisoned)?;
5556

5657
match hashmap_guard.get(stream) {
5758
Some(localwriter) => {
5859
let mut writer_guard = localwriter
5960
.lock()
60-
.map_err(|_| StreamWriterError::MutexPoisioned)?;
61+
.map_err(|_| StreamWriterError::MutexPoisoned)?;
6162

6263
// if it's some writer then we write without dropping any lock
6364
// hashmap cannot be brought mutably at any point until this finishes
@@ -85,7 +86,7 @@ impl STREAM_WRITERS {
8586
fn create_entry(stream: String, record: &RecordBatch) -> Result<(), StreamWriterError> {
8687
let mut hashmap_guard = STREAM_WRITERS
8788
.write()
88-
.map_err(|_| StreamWriterError::RwPoisioned)?;
89+
.map_err(|_| StreamWriterError::RwPoisoned)?;
8990

9091
let file = OpenOptions::new()
9192
.append(true)
@@ -109,7 +110,7 @@ impl STREAM_WRITERS {
109110
pub fn delete_entry(stream: &str) -> Result<(), StreamWriterError> {
110111
let mut hashmap_guard = STREAM_WRITERS
111112
.write()
112-
.map_err(|_| StreamWriterError::RwPoisioned)?;
113+
.map_err(|_| StreamWriterError::RwPoisoned)?;
113114

114115
hashmap_guard.remove(stream);
115116

@@ -143,14 +144,14 @@ impl STREAM_WRITERS {
143144
pub fn unset_entry(stream: &str) -> Result<(), StreamWriterError> {
144145
let guard = STREAM_WRITERS
145146
.read()
146-
.map_err(|_| StreamWriterError::RwPoisioned)?;
147+
.map_err(|_| StreamWriterError::RwPoisoned)?;
147148
let stream_writer = match guard.get(stream) {
148149
Some(writer) => writer,
149150
None => return Ok(()),
150151
};
151152
stream_writer
152153
.lock()
153-
.map_err(|_| StreamWriterError::MutexPoisioned)?
154+
.map_err(|_| StreamWriterError::MutexPoisoned)?
154155
.take();
155156

156157
Ok(())
@@ -163,10 +164,10 @@ pub enum StreamWriterError {
163164
Writer(arrow::error::ArrowError),
164165
#[error("Io Error when creating new file: {0}")]
165166
Io(std::io::Error),
166-
#[error("RwLock was poisioned")]
167-
RwPoisioned,
168-
#[error("Mutex was poisioned")]
169-
MutexPoisioned,
167+
#[error("RwLock was poisoned")]
168+
RwPoisoned,
169+
#[error("Mutex was poisoned")]
170+
MutexPoisoned,
170171
}
171172

172173
fn data_file_path(stream_name: &str) -> String {
@@ -189,24 +190,17 @@ pub struct Event {
189190
// Events holds the schema related to a each event for a single log stream
190191

191192
impl Event {
192-
pub async fn process(
193-
&self,
194-
storage: &impl ObjectStorage,
195-
) -> Result<response::EventResponse, Error> {
196-
let inferred_schema = self.infer_schema().map_err(|e| {
197-
error!("Failed to infer schema for event. {:?}", e);
198-
e
199-
})?;
193+
pub async fn process(&self, storage: &impl ObjectStorage) -> Result<(), EventError> {
194+
let inferred_schema = self.infer_schema()?;
200195

201196
let event = self.get_reader(inferred_schema.clone());
202197

203198
let stream_schema = metadata::STREAM_INFO.schema(&self.stream_name)?;
204-
let is_first_event = stream_schema.is_none();
205199

206200
if let Some(existing_schema) = stream_schema {
207201
// validate schema before processing the event
208202
if existing_schema != inferred_schema {
209-
return Err(Error::SchemaMismatch(self.stream_name.clone()));
203+
return Err(EventError::SchemaMismatch(self.stream_name.clone()));
210204
} else {
211205
self.process_event(event)?
212206
}
@@ -221,16 +215,7 @@ impl Event {
221215
error!("Error checking for alerts. {:?}", e);
222216
}
223217

224-
let msg = if is_first_event {
225-
format!(
226-
"Intial Event recieved for log stream {}, schema uploaded successfully",
227-
&self.stream_name,
228-
)
229-
} else {
230-
format!("Event recieved for log stream {}", &self.stream_name)
231-
};
232-
233-
Ok(response::EventResponse { msg })
218+
Ok(())
234219
}
235220

236221
// This is called when the first event of a log stream is received. The first event is
@@ -241,56 +226,42 @@ impl Event {
241226
mut event: json::Reader<R>,
242227
schema: Schema,
243228
storage: &impl ObjectStorage,
244-
) -> Result<u64, Error> {
245-
let rb = event.next()?.ok_or(Error::MissingRecord)?;
229+
) -> Result<u64, EventError> {
230+
let rb = event.next()?.ok_or(EventError::MissingRecord)?;
246231
let stream_name = &self.stream_name;
247232

248233
// Store record batch on local cache
249234
STREAM_WRITERS::create_entry(stream_name.clone(), &rb).unwrap();
250235

251236
// Put the inferred schema to object store
252-
storage
253-
.put_schema(stream_name.clone(), &schema)
254-
.await
255-
.map_err(|e| response::EventError {
256-
msg: format!(
257-
"Failed to upload schema for log stream {} due to err: {}",
258-
stream_name, e
259-
),
260-
})?;
237+
storage.put_schema(stream_name.clone(), &schema).await?;
261238

262239
// set the schema in memory for this stream
263-
metadata::STREAM_INFO
264-
.set_schema(stream_name, schema)
265-
.map_err(|e| response::EventError {
266-
msg: format!(
267-
"Failed to set schema for log stream {} due to err: {}",
268-
stream_name, e
269-
),
270-
})?;
240+
metadata::STREAM_INFO.set_schema(stream_name, schema)?;
271241

272242
Ok(0)
273243
}
274244

275245
// event process all events after the 1st event. Concatenates record batches
276246
// and puts them in memory store for each event.
277-
fn process_event<R: std::io::Read>(&self, mut event: json::Reader<R>) -> Result<u64, Error> {
278-
let rb = event.next()?.ok_or(Error::MissingRecord)?;
247+
fn process_event<R: std::io::Read>(
248+
&self,
249+
mut event: json::Reader<R>,
250+
) -> Result<u64, EventError> {
251+
let rb = event.next()?.ok_or(EventError::MissingRecord)?;
279252
let stream_name = &self.stream_name;
280253

281-
STREAM_WRITERS::append_to_local(stream_name, &rb).unwrap();
254+
STREAM_WRITERS::append_to_local(stream_name, &rb)?;
282255

283256
Ok(0)
284257
}
285258

286259
// inferSchema is a constructor to Schema
287260
// returns raw arrow schema type and arrow schema to string type.
288-
fn infer_schema(&self) -> Result<Schema, Error> {
261+
fn infer_schema(&self) -> Result<Schema, ArrowError> {
289262
let reader = self.body.as_bytes();
290263
let mut buf_reader = BufReader::new(reader);
291-
let inferred_schema = infer_json_schema(&mut buf_reader, None)?;
292-
293-
Ok(inferred_schema)
264+
infer_json_schema(&mut buf_reader, None)
294265
}
295266

296267
fn get_reader(&self, arrow_schema: arrow::datatypes::Schema) -> json::Reader<&[u8]> {
@@ -301,3 +272,27 @@ impl Event {
301272
)
302273
}
303274
}
275+
276+
pub mod error {
277+
use crate::metadata::error::stream_info::MetadataError;
278+
use crate::storage::ObjectStorageError;
279+
use datafusion::arrow::error::ArrowError;
280+
281+
use super::StreamWriterError;
282+
283+
#[derive(Debug, thiserror::Error)]
284+
pub enum EventError {
285+
#[error("Missing Record from event body")]
286+
MissingRecord,
287+
#[error("Stream Writer Failed: {0}")]
288+
StreamWriter(#[from] StreamWriterError),
289+
#[error("Metadata Error: {0}")]
290+
Metadata(#[from] MetadataError),
291+
#[error("Stream Writer Failed: {0}")]
292+
Arrow(#[from] ArrowError),
293+
#[error("Schema Mismatch: {0}")]
294+
SchemaMismatch(String),
295+
#[error("Schema Mismatch: {0}")]
296+
ObjectStorage(#[from] ObjectStorageError),
297+
}
298+
}

0 commit comments

Comments
 (0)