This repository was archived by the owner on May 22, 2025. It is now read-only.

Commit 9040dbe

Initial commit.
0 parents  commit 9040dbe

File tree

7 files changed: 319 additions, 0 deletions


.gitignore

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
/target

.vscode
Cargo.lock

Cargo.toml

Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
[package]
name = "feedback-normalization"
version = "0.1.0"
authors = ["ashcorr"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
mysql = "17.0.0"
serde_json = "1.0"
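
The pinned mysql 17.0 release is an older version of the crate; the prep_exec, prepare, and params! calls and the mysql::chrono re-export used in src/main.rs belong to that era's API (later major versions reorganised these into query/exec traits). Below is a minimal connection sketch against that API, with a placeholder URL rather than real credentials; the erro_feedback table name is taken from src/main.rs further down.

    extern crate mysql;

    fn main() {
        // Placeholder connection string; a real deployment supplies its own.
        let pool = mysql::Pool::new("mysql://user:password@localhost:3306/ss13").unwrap();
        // prep_exec() prepares and executes a statement in one call (mysql 17.x API).
        let result = pool.prep_exec("SELECT `round_id` FROM `erro_feedback` LIMIT 1", ()).unwrap();
        for row in result {
            let row = row.unwrap();
            let round_id: i32 = row.get(0).unwrap();
            println!("round_id = {}", round_id);
        }
    }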

Dockerfile

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
FROM rust:1.40 as build

COPY ./ ./

RUN cargo build --release

FROM ubuntu:18.04

ENV DEBIAN_FRONTEND=noninteractive

COPY --from=build /target/release/feedback-normalization /

CMD /feedback-normalization

Jenkinsfile

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
node {
    def app

    stage('Clone repository') {
        checkout scm
    }

    stage('Build image') {
        app = docker.build("yogstation/feedback-normalization")
    }

    stage('Push image') {
        docker.withRegistry('https://registry.hub.docker.com', 'yogstation-docker-hub-credentials') {
            app.push("${env.BUILD_NUMBER}")
            app.push("latest")
        }
    }

    stage('Deploy') {
        withKubeConfig([credentialsId: 'yogstation-kubeconf-credentials']) {
            sh 'kubectl set image --namespace yogbot deployment/feedback-normalization feedback-normalization=yogstation/feedback-normalization:${BUILD_NUMBER}'
        }
    }
}

src/main.rs

Lines changed: 189 additions & 0 deletions
@@ -0,0 +1,189 @@
#[macro_use]
extern crate mysql;

use mysql::chrono::NaiveDateTime;
use serde_json::Value;
use std::env;

#[derive(Debug)]
pub struct FeedbackEvent {
    id: i32,
    datetime: NaiveDateTime,
    round_id: i32,
    key_name: String,
    key_type: String,
    version: i32,
    json: Value,
}

#[derive(Debug)]
pub struct FeedbackEventNormalised {
    datetime: NaiveDateTime,
    round_id: i32,
    category_primary: String,
    category_secondary: String,
    category_tertiary: String,
    version: i32,
    value: String,
}

fn main() -> Result<(), std::io::Error> {
    // let database_url =
    //     env::var("FEEDBACK_DATABASE_URL").expect("Could not read FEEDBACK_DATABASE_URL variable");
    // let feedback_table_name =
    //     env::var("FEEDBACK_TABLE_NAME").expect("Could not read FEEDBACK_TABLE_NAME variable");
    // let feedback_normalized_table_name = env::var("FEEDBACK_NORMALISED_TABLE_NAME")
    //     .expect("Could not read FEEDBACK_NORMALISED_TABLE_NAME variable");
    let database_url = "mysql://root:password@localhost:3306/ss13";
    let feedback_table_name = "erro_feedback";
    let feedback_normalized_table_name = "erro_feedback_normalized";
    let pool = mysql::Pool::new(database_url).unwrap();

    let events: Vec<FeedbackEvent> = pool
        .prep_exec(
            format!(
                "SELECT * FROM {} WHERE `round_id` NOT IN (SELECT `round_id` FROM {})",
                feedback_table_name, feedback_normalized_table_name
            ),
            (),
        )
        .map(|result| {
            result
                .map(|x| x.unwrap())
                .map(|row| {
                    let (id, datetime, round_id, key_name, key_type, version, json) =
                        mysql::from_row(row);
                    let json: String = json;
                    let json = serde_json::from_str(&json).unwrap();
                    FeedbackEvent {
                        id: id,
                        datetime: datetime,
                        round_id: round_id,
                        key_name: key_name,
                        key_type: key_type,
                        version: version,
                        json: json,
                    }
                })
                .collect()
        })
        .unwrap();

    let mut normalised_events = Vec::new();
    for event in &events {
        normalised_events.extend(match event.key_type.as_str() {
            "amount" => process_amount(event),
            "tally" => process_tally(event),
            "associative" => process_associative(event),
            "nested tally" => process_nested_tally(event),
            "text" => process_text(event),
            _ => panic!("Unexpected key_type: {}", event.key_type),
        });
    }

    let mut stmt = pool
        .prepare(format!(
            "INSERT INTO {} (datetime, round_id, category_primary, category_secondary, category_tertiary, version, data) VALUES
            (:datetime, :round_id, :category_primary, :category_secondary, :category_tertiary, :version, :data)",
            feedback_normalized_table_name
        )).unwrap();

    for event in normalised_events.iter() {
        stmt.execute(params! {
            "datetime" => event.datetime,
            "round_id" => event.round_id,
            "category_primary" => &event.category_primary,
            "category_secondary" => &event.category_secondary,
            "category_tertiary" => &event.category_tertiary,
            "version" => event.version,
            "data" => &event.value,
        })
        .unwrap();
    }

    // println!("Hello, world! {:?}", normalised_events);

    Ok(())
}

pub fn process_amount(event: &FeedbackEvent) -> Vec<FeedbackEventNormalised> {
    vec![FeedbackEventNormalised {
        datetime: event.datetime,
        round_id: event.round_id,
        category_primary: event.key_name.clone(),
        category_secondary: "".to_string(),
        category_tertiary: "".to_string(),
        version: event.version,
        value: event.json["data"].to_string(),
    }]
}

pub fn process_tally(event: &FeedbackEvent) -> Vec<FeedbackEventNormalised> {
    let mut normalised_events = vec![];
    for (key, value) in event.json["data"].as_object().unwrap() {
        normalised_events.push(FeedbackEventNormalised {
            datetime: event.datetime,
            round_id: event.round_id,
            category_primary: event.key_name.clone(),
            category_secondary: key.to_string(),
            category_tertiary: "".to_string(),
            version: event.version,
            value: value.to_string(),
        });
    }

    normalised_events
}

pub fn process_nested_tally(event: &FeedbackEvent) -> Vec<FeedbackEventNormalised> {
    let mut normalised_events = vec![];
    for (key, value) in event.json["data"].as_object().unwrap() {
        for (nested_key, nested_value) in value.as_object().unwrap() {
            normalised_events.push(FeedbackEventNormalised {
                datetime: event.datetime,
                round_id: event.round_id,
                category_primary: event.key_name.clone(),
                category_secondary: key.to_string(),
                category_tertiary: nested_key.to_string(),
                version: event.version,
                value: nested_value.to_string(),
            });
        }
    }

    normalised_events
}

pub fn process_associative(event: &FeedbackEvent) -> Vec<FeedbackEventNormalised> {
    println!("Received associative value, ignoring. Associative events will need a more manual approach. {:?}", event.key_name);
    vec![]
}

pub fn process_text(event: &FeedbackEvent) -> Vec<FeedbackEventNormalised> {
    let mut normalised_events = vec![];
    if event.json["data"].is_array() {
        for value in event.json["data"].as_array().unwrap() {
            normalised_events.push(FeedbackEventNormalised {
                datetime: event.datetime,
                round_id: event.round_id,
                category_primary: event.key_name.clone(),
                category_secondary: "".to_string(),
                category_tertiary: "".to_string(),
                version: event.version,
                value: value.to_string(),
            });
        }
    } else {
        normalised_events.push(FeedbackEventNormalised {
            datetime: event.datetime,
            round_id: event.round_id,
            category_primary: event.key_name.clone(),
            category_secondary: "".to_string(),
            category_tertiary: "".to_string(),
            version: event.version,
            value: event.json["data"].to_string(),
        });
    }

    normalised_events
}
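
For reference, here is a minimal sketch of how the flattening above behaves on a "tally" versus a "nested tally" payload. The JSON below is illustrative sample data, not taken from the real feedback table; only the "data" wrapper key comes from the code above.

    use serde_json::json;

    fn main() {
        // Hypothetical "tally" payload: one normalised row per key under "data",
        // with the key in category_secondary and the count in value.
        let tally = json!({ "data": { "laser": 3, "ballistic": 1 } });
        for (key, value) in tally["data"].as_object().unwrap() {
            println!("secondary = {}, value = {}", key, value);
        }

        // Hypothetical "nested tally" payload: one row per inner key, with the
        // outer key in category_secondary and the inner key in category_tertiary.
        let nested = json!({ "data": { "laser": { "hit": 2, "miss": 1 } } });
        for (key, value) in nested["data"].as_object().unwrap() {
            for (nested_key, nested_value) in value.as_object().unwrap() {
                println!("secondary = {}, tertiary = {}, value = {}", key, nested_key, nested_value);
            }
        }
    }

The "amount" and "text" handlers store the whole "data" value (or each array element) with empty secondary and tertiary categories, and "associative" events are currently logged and skipped.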
