@@ -10,5 +10,148 @@ end); recursive=true)

    doctest(TidierFiles)

+
+    @testset "JSON Test" begin
+
+        # Round-trip helper: write the DataFrame to a local JSON file with
+        # write_json, read it back with read_json, and check that the data
+        # survives unchanged.
+        function roundTripDataFrame(df::DataFrame; JSONObjectVector=true)
+            write_json(df, "testdf.json"; JSONObjectVector)
+            df_read = read_json("testdf.json")
+            return isequal(df, df_read)
+        end
+
+
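+        # Assumed (not verified by these tests): JSONObjectVector=true presumably
+        # writes one JSON object per row, e.g. [{"x": 1}, {"x": 2}], while false
+        # presumably writes one vector per column, e.g. {"x": [1, 2]}. See the
+        # TidierFiles write_json documentation for the authoritative behaviour.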
+        df_anscombe = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/anscombe.json")
+
+        @test typeof(df_anscombe.X) == Vector{Float64}
+
+        @test roundTripDataFrame(df_anscombe; JSONObjectVector=false)
+
+        df_barley = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/barley.json")
+
+        @test typeof(df_barley.yield) == Vector{Float64}
+
+        @test roundTripDataFrame(df_barley; JSONObjectVector=false)
+
+        df_budget = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/budget.json")
+
+        @test roundTripDataFrame(df_budget)
+
+        df_budgets = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/budgets.json")
+
+        @test roundTripDataFrame(df_budgets)
+        @test typeof(df_budgets.value) == Vector{Float64}
+        @test typeof(df_budgets.budgetYear) == Vector{Int64}
+
+
+        df_burtin = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/burtin.json")
+
+        @test roundTripDataFrame(df_burtin)
+
+        df_cars = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/cars.json")
+
+        @test sum(skipmissing(df_cars.Horsepower)) == 42033
+
+        @test roundTripDataFrame(df_cars)
+
+        df_countries = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/countries.json")
+
+        @test sum(skipmissing(df_countries.p_life_expect)) ≈ 36591.29 atol=0.01
+
+
+        df_crimea = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/crimea.json")
+
+        @test roundTripDataFrame(df_crimea)
+
+        df_driving = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/driving.json")
+
+        @test roundTripDataFrame(df_driving)
+
+
+        df_flights_200k = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/flights-200k.json")
+
+        @test roundTripDataFrame(df_flights_200k)
+
+        df_football = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/football.json")
+
+        @test roundTripDataFrame(df_football)
+
+        df_income = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/income.json")
+
+        @test roundTripDataFrame(df_income)
+
+        df_jobs = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/jobs.json")
+
+        @test roundTripDataFrame(df_jobs)
+
+
+        df_movies = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/movies.json")
+
+        @test roundTripDataFrame(df_movies)
+
+        df_obesity = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/obesity.json")
+
+        @test sum(df_obesity.rate) ≈ 7.791 atol=0.01
+
+        df_ohlc = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/ohlc.json")
+
+        @test sum(df_ohlc.open) ≈ 1223.04 atol=0.01
+
+        df_penguins = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/penguins.json")
+
+        @test sum(skipmissing(df_penguins."Flipper Length (mm)")) == 68713
+
+        @test roundTripDataFrame(df_penguins)
+
+
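+        # The next four datasets (platformer-terrain, political-contributions,
+        # population, udistrict) are only read; read_json returning without an
+        # error is the check, with no further assertions on their contents.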
+        df_platformer_terrain = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/platformer-terrain.json")
+
+        df_political_contributions = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/political-contributions.json")
+
+        df_population = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/population.json")
+
+        df_udistrict = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/udistrict.json")
+
+        df_unemployment = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/unemployment-across-industries.json")
+
+        @test roundTripDataFrame(df_unemployment)
+
+
+        df_uniform_2d = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/uniform-2d.json")
+
+        df_us_10m = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/us-10m.json")
+
+        df_us_state_capitals = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/us-state-capitals.json")
+
+        @test roundTripDataFrame(df_us_state_capitals)
+
+        @test sum(df_us_state_capitals.lat) ≈ 1970.67 atol=0.01
+
+        # df_volcano = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/volcano.json")
+
+        df_weekly_weather = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/weekly-weather.json")
+
+
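+        # Left disabled below: a sketch that widens the nested `normal`, `record`,
+        # and `forecast` columns of the weekly-weather data with @unnest_wider and
+        # sums forecast_high; no expected values are asserted yet.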
+        #=
+        df_weekly_weather_without_missing = dropmissing(df_weekly_weather, :forecast)
+
+        df_weekly_weather_unnested = @unnest_wider(df_weekly_weather_without_missing, normal, record, forecast)
+        sum(skipmissing(df_weekly_weather_unnested.forecast_high))
+
+        df_weekly_weather_unnested2 = @unnest_wider(df_weekly_weather, normal, record, forecast)
+        sum(skipmissing(df_weekly_weather_unnested2.forecast_high))
+        =#
+
+        df_wheat = read_json("https://raw.githubusercontent.com/vega/vega-datasets/refs/heads/main/data/wheat.json")
+
+        @test sum(skipmissing(df_wheat.wages)) ≈ 579.08 atol=0.01
+
+        @test typeof(df_wheat.wheat) == Vector{Float64}
+
+        @test roundTripDataFrame(df_wheat)
+
+
+    end
+
+
end