
Commit 02bde12

feat: add ETL exercise (#76)

1 parent: a2534af

14 files changed: +303 -0 lines

config.json

Lines changed: 8 additions & 0 deletions
@@ -218,6 +218,14 @@
       "prerequisites": [],
       "difficulty": 8
     },
+    {
+      "slug": "etl",
+      "name": "ETL",
+      "uuid": "63792af3-0fce-4c90-8cc3-4844ad6b6861",
+      "practices": [],
+      "prerequisites": [],
+      "difficulty": 5
+    },
     {
       "slug": "rna-transcription",
       "name": "RNA Transcription",
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
# SQLite specific instructions

This exercise requires you to set the `result` column of the `etl` table to the correct value based on the JSON object found in the `input` column. The keys in the result object must be sorted alphabetically.

## Table Schema

```sql
CREATE TABLE "etl" ("input" TEXT, "result" TEXT);
```
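
For example, a row whose `input` is `{"1":["A"]}` must end up with `{"a":1}` in its `result` (this input/output pair comes from the test data in this commit). The snippet below is only an illustrative sketch, not part of the exercise files:

```sql
-- Illustrative only: seed one sample row, then inspect it after
-- your UPDATE statement has run.
INSERT INTO etl ("input", "result") VALUES ('{"1":["A"]}', '');
-- Once solved, the row should read:
--   input  = '{"1":["A"]}'
--   result = '{"a":1}'   (keys sorted alphabetically, letters lower-cased)
SELECT input, result FROM etl;
```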
Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
# Instructions

Your task is to change the data format of letters and their point values in the game.

Currently, letters are stored in groups based on their score, in a one-to-many mapping.

- 1 point: "A", "E", "I", "O", "U", "L", "N", "R", "S", "T",
- 2 points: "D", "G",
- 3 points: "B", "C", "M", "P",
- 4 points: "F", "H", "V", "W", "Y",
- 5 points: "K",
- 8 points: "J", "X",
- 10 points: "Q", "Z",

This needs to be changed to store each individual letter with its score in a one-to-one mapping.

- "a" is worth 1 point.
- "b" is worth 3 points.
- "c" is worth 3 points.
- "d" is worth 2 points.
- etc.

As part of this change, the team has also decided to change the letters to be lower-case rather than upper-case.

~~~~exercism/note
If you want to look at how the data was previously structured and how it needs to change, take a look at the examples in the test suite.
~~~~
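
To make the change in shape concrete, one of the smaller mappings from the test cases in this commit is `{"1": ["A", "E"], "2": ["D", "G"]}` before the transformation, and `{"a": 1, "d": 2, "e": 1, "g": 2}` after it.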
Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
# Introduction

You work for a company that makes an online multiplayer game called Lexiconia.

To play the game, each player is given 13 letters, which they must rearrange to create words.
Different letters have different point values, since it's easier to create words with some letters than others.

The game was originally launched in English, but it is very popular, and now the company wants to expand to other languages as well.

Different languages need to support different point values for letters.
The point values are determined by how often letters are used, compared to other letters in that language.

For example, the letter 'C' is quite common in English, and is only worth 3 points.
But in Norwegian it's a very rare letter, and is worth 10 points.

To make it easier to add new languages, your team needs to change the way letters and their point values are stored in the game.
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
{
  "authors": [
    "Steffan153"
  ],
  "files": {
    "solution": [
      "etl.sql"
    ],
    "test": [
      "etl_test.sql"
    ],
    "example": [
      ".meta/example.sql"
    ],
    "editor": [
      "data.csv"
    ]
  },
  "blurb": "Change the data format for scoring a game to more easily add other languages.",
  "source": "Based on an exercise by the JumpstartLab team for students at The Turing School of Software and Design.",
  "source_url": "https://turing.edu"
}
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
-- Walk every node of the JSON in `input` with json_tree; rows with
-- type = 'text' are the individual letter strings, and `path` is the
-- path of the containing array, which ends in the score key.
-- TRIM strips the '$', '.', and '"' characters from both ends of the
-- path, and `+ 0` coerces the remaining key text to an integer score.
-- ORDER BY value feeds the letters to json_group_object in alphabetical
-- order, and LOWER(value) lower-cases each letter.
UPDATE etl
SET result = (
    SELECT json_group_object(LOWER(value), TRIM(path, '$."') + 0)
    FROM (
        SELECT value, path
        FROM json_tree(input)
        WHERE type = 'text'
        ORDER BY value
    )
);
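
As a quick sanity check (illustrative only; runnable in any `sqlite3` shell built with JSON support), the inner query can be exercised against a simplified input directly:

```sql
-- For the input '{"1":["A","E"]}', json_tree yields two text nodes,
-- 'A' and 'E', both contained under the score key 1.
SELECT json_group_object(LOWER(value), TRIM(path, '$."') + 0)
FROM (
    SELECT value, path
    FROM json_tree('{"1":["A","E"]}')
    WHERE type = 'text'
    ORDER BY value
);
-- Expected output: {"a":1,"e":1}
```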
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
# This is an auto-generated file.
#
# Regenerating this file via `configlet sync` will:
# - Recreate every `description` key/value pair
# - Recreate every `reimplements` key/value pair, where they exist in problem-specifications
# - Remove any `include = true` key/value pair (an omitted `include` key implies inclusion)
# - Preserve any other key/value pair
#
# As user-added comments (using the # character) will be removed when this file
# is regenerated, comments can be added via a `comment` key.

[78a7a9f9-4490-4a47-8ee9-5a38bb47d28f]
description = "single letter"

[60dbd000-451d-44c7-bdbb-97c73ac1f497]
description = "single score with multiple letters"

[f5c5de0c-301f-4fdd-a0e5-df97d4214f54]
description = "multiple scores with multiple letters"

[5db8ea89-ecb4-4dcd-902f-2b418cc87b9d]
description = "multiple scores with differing numbers of letters"
Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
{
  "exercise": "etl",
  "comments": [
    "Transforms a set of legacy Lexiconia data stored as letters per score",
    "to a set of data stored score per letter.",
    "Note: The expected input data for these tests should have",
    "integer keys (not stringified numbers as shown in the JSON below).",
    "Unless the language prohibits that, please implement these tests",
    "such that keys are integers. e.g. in JavaScript, it might look",
    "like `transform( { 1: ['A'] } );`"
  ],
  "cases": [
    {
      "uuid": "78a7a9f9-4490-4a47-8ee9-5a38bb47d28f",
      "description": "single letter",
      "property": "transform",
      "input": {
        "legacy": {
          "1": ["A"]
        }
      },
      "expected": {
        "a": 1
      }
    },
    {
      "uuid": "60dbd000-451d-44c7-bdbb-97c73ac1f497",
      "description": "single score with multiple letters",
      "property": "transform",
      "input": {
        "legacy": {
          "1": ["A", "E", "I", "O", "U"]
        }
      },
      "expected": {
        "a": 1,
        "e": 1,
        "i": 1,
        "o": 1,
        "u": 1
      }
    },
    {
      "uuid": "f5c5de0c-301f-4fdd-a0e5-df97d4214f54",
      "description": "multiple scores with multiple letters",
      "property": "transform",
      "input": {
        "legacy": {
          "1": ["A", "E"],
          "2": ["D", "G"]
        }
      },
      "expected": {
        "a": 1,
        "d": 2,
        "e": 1,
        "g": 2
      }
    },
    {
      "uuid": "5db8ea89-ecb4-4dcd-902f-2b418cc87b9d",
      "description": "multiple scores with differing numbers of letters",
      "property": "transform",
      "input": {
        "legacy": {
          "1": ["A", "E", "I", "O", "U", "L", "N", "R", "S", "T"],
          "2": ["D", "G"],
          "3": ["B", "C", "M", "P"],
          "4": ["F", "H", "V", "W", "Y"],
          "5": ["K"],
          "8": ["J", "X"],
          "10": ["Q", "Z"]
        }
      },
      "expected": {
        "a": 1,
        "b": 3,
        "c": 3,
        "d": 2,
        "e": 1,
        "f": 4,
        "g": 2,
        "h": 4,
        "i": 1,
        "j": 8,
        "k": 5,
        "l": 1,
        "m": 3,
        "n": 1,
        "o": 1,
        "p": 3,
        "q": 10,
        "r": 1,
        "s": 1,
        "t": 1,
        "u": 1,
        "v": 4,
        "w": 4,
        "x": 8,
        "y": 4,
        "z": 10
      }
    }
  ]
}
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS "etl";
CREATE TABLE "etl" (
    "input" TEXT,
    "result" TEXT
);

-- sqlite3 shell dot-commands: switch to CSV mode and load the
-- test fixtures from data.csv into the etl table.
.mode csv
.import ./data.csv etl

exercises/practice/etl/data.csv

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
"{""1"":[""A""]}",""
"{""1"":[""A"",""E"",""I"",""O"",""U""]}",""
"{""1"":[""A"",""E""],""2"":[""D"",""G""]}",""
"{""1"":[""A"",""E"",""I"",""O"",""U"",""L"",""N"",""R"",""S"",""T""],""2"":[""D"",""G""],""3"":[""B"",""C"",""M"",""P""],""4"":[""F"",""H"",""V"",""W"",""Y""],""5"":[""K""],""8"":[""J"",""X""],""10"":[""Q"",""Z""]}",""
