Skip to content

Commit 63fb577

Browse files
authored
Merge pull request #17 from JuliaAstro/quality
chore: Julia v12 updates
2 parents f961c03 + fa1e21b commit 63fb577

File tree

9 files changed

+122
-112
lines changed

9 files changed

+122
-112
lines changed

.github/workflows/CI.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@ jobs:
       - uses: julia-actions/cache@v2
       - uses: julia-actions/julia-buildpkg@v1
       - uses: julia-actions/julia-runtest@v1
+        with:
+          test_args: '--verbose'
       - uses: julia-actions/julia-processcoverage@v1
       - uses: codecov/codecov-action@v5
         with:

Project.toml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
 name = "ASDF"
 uuid = "686f71d1-807d-59a4-a860-28280ea06d7b"
-authors = ["Erik Schnetter <schnetter@gmail.com>"]
 version = "2.0.0"
+authors = ["Erik Schnetter <schnetter@gmail.com>"]
+
+[workspace]
+projects = ["test", "docs"]
 
 [deps]
-BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 ChunkCodecLibBlosc = "c6a955be-ab7f-4fbb-b38f-caf93db6b928"
 ChunkCodecLibBzip2 = "2b723af9-f480-4e8d-a1e4-4a9f5a906122"
 ChunkCodecLibLz4 = "7e9cc85e-5614-42a3-ad86-b78f920b38a5"
@@ -17,7 +19,6 @@ StridedViews = "4db3bf67-4bd7-4b4e-b153-31dc3fb37143"
 YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6"
 
 [compat]
-BlockArrays = "0.16.43, 1"
 ChunkCodecLibBlosc = "0.2.0, 0.3"
 ChunkCodecLibBzip2 = "0.2.0, 1.0"
 ChunkCodecLibLz4 = "0.2.1, 1.0"

src/ASDF.jl

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,15 @@
 module ASDF
 
-using BlockArrays
-using CodecXz
-using ChunkCodecLibBlosc
-using ChunkCodecLibBzip2
-using ChunkCodecLibLz4
-using ChunkCodecLibZlib
-using ChunkCodecLibZstd
-using MD5
-using PkgVersion
-using StridedViews
-using YAML
+using ChunkCodecLibBlosc: BloscCodec, BloscEncodeOptions
+using ChunkCodecLibBzip2: BZ2Codec, BZ2EncodeOptions
+using ChunkCodecLibLz4: LZ4FrameCodec, LZ4FrameEncodeOptions
+using ChunkCodecLibZlib: ZlibCodec, ZlibEncodeOptions
+using ChunkCodecLibZstd: ZstdCodec, ZstdEncodeOptions, decode, encode
+using CodecXz: XzCompressor, XzDecompressor
+using MD5: md5
+using PkgVersion: PkgVersion
+using StridedViews: StridedView
+using YAML: YAML
 
 ################################################################################
 
test/Project.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
 [deps]
+ASDF = "686f71d1-807d-59a4-a860-28280ea06d7b"
+Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
+ParallelTestRunner = "d3525ed8-44d0-4b2c-a655-542cee43accc"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6"

test/runtests.jl

Lines changed: 20 additions & 98 deletions
Original file line numberDiff line numberDiff line change
@@ -1,103 +1,25 @@
+using ParallelTestRunner: runtests, find_tests, parse_args
 using ASDF
-using Test
-using YAML
 
-map_tree(f, x) = f(x)
-map_tree(f, vec::AbstractVector) = [map_tree(f, elem) for elem in vec]
-map_tree(f, dict::AbstractDict) = Dict(key => map_tree(f, val) for (key, val) in dict)
-
-output(x) = nothing
-function output(arr::ASDF.NDArray)
-    println("source: $(arr.source)")
-    data = arr[]
-    println(" type: $(typeof(data))")
-    return println(" size: $(size(data))")
-end
-
-################################################################################
-
-@testset "Read ASDF file" begin
-    asdf = ASDF.load_file("blue_upchan_gain.00000000.asdf")
-    println(YAML.write(asdf.metadata))
-
-    map_tree(output, asdf.metadata)
-
-    buffer = asdf.metadata[0]["buffer"][]
-    @test eltype(buffer) == Float16
-    @test size(buffer) == (256,)
-    @test buffer == fill(1, 256)
-
-    dish_index = asdf.metadata[0]["dish_index"][]
-    @test eltype(dish_index) == Int32
-    @test size(dish_index) == (3, 2)
-    @test dish_index == [
-        -1 -1
-        42 53
-        43 54
-    ]
-end
-
-################################################################################
-
-@testset "Read ASDF file with chunked arrays" begin
-    asdf = ASDF.load_file("chunking.asdf")
-    println(YAML.write(asdf.metadata))
-
-    map_tree(output, asdf.metadata)
-
-    chunky = asdf.metadata["chunky"][]
-    @test eltype(chunky) == Float16
-    @test size(chunky) == (4, 4)
-    @test chunky == [
-        11 21 31 41
-        12 22 32 42
-        13 23 33 43
-        14 24 34 44
-    ]
+const init_code = quote
+    using ASDF
+    using Test
+    using YAML
+
+    map_tree(f, x) = f(x)
+    map_tree(f, vec::AbstractVector) = [map_tree(f, elem) for elem in vec]
+    map_tree(f, dict::AbstractDict) = Dict(key => map_tree(f, val) for (key, val) in dict)
+
+    output(x) = nothing
+    function output(arr::ASDF.NDArray)
+        println("source: $(arr.source)")
+        data = arr[]
+        println(" type: $(typeof(data))")
+        return println(" size: $(size(data))")
+    end
 end
 
-################################################################################
-
-@testset "Write ASDF file" begin
-    dirname = mktempdir(; cleanup=true)
-    filename = joinpath(dirname, "output.asdf")
-
-    array = Float64[1/(i+j+k-2) for i in 1:50, j in 1:51, k in 1:52]
-    doc = Dict{Any,Any}(
-        "data1" => ASDF.NDArrayWrapper([1 2; 3 4]; inline=false),
-        "data2" => ASDF.NDArrayWrapper([1 2; 3 4]; inline=true),
-        "group" => Dict{Any,Any}(
-            "element1" => ASDF.NDArrayWrapper(array; compression=ASDF.C_None),
-            "element2" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Blosc),
-            "element3" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Bzip2),
-            "element4" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Lz4),
-            "element5" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Xz),
-            "element6" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Zlib),
-            "element7" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Zstd),
-        ),
-    )
-    ASDF.write_file(filename, doc)
+args = parse_args(Base.ARGS)
+testsuite = find_tests(@__DIR__)
 
-    doc′ = ASDF.load_file(filename)
-    map_tree(output, doc′.metadata)
-
-    data1 = doc["data1"][]
-    data1′ = doc′.metadata["data1"][]
-    @test eltype(data1′) == eltype(data1)
-    @test size(data1′) == size(data1)
-    @test data1′ == data1
-
-    data2 = doc["data2"][]
-    data2′ = doc′.metadata["data2"][]
-    @test eltype(data2′) == eltype(data2)
-    @test size(data2′) == size(data2)
-    @test data2′ == data2
-
-    for n in 1:7
-        element = doc["group"]["element$n"][]
-        element′ = doc′.metadata["group"]["element$n"][]
-        @test eltype(element′) == eltype(element)
-        @test size(element′) == size(element)
-        @test element′ == element
-    end
-end
+runtests(ASDF, args; testsuite, init_code)

test/test-aqua.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
+@testset "Aqua" begin
+    using ASDF, Aqua
+    Aqua.test_all(ASDF)
+end

test/test-read.jl

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
+@testset "Read ASDF file" begin
+    asdf = ASDF.load_file("blue_upchan_gain.00000000.asdf")
+    println(YAML.write(asdf.metadata))
+
+    map_tree(output, asdf.metadata)
+
+    buffer = asdf.metadata[0]["buffer"][]
+    @test eltype(buffer) == Float16
+    @test size(buffer) == (256,)
+    @test buffer == fill(1, 256)
+
+    dish_index = asdf.metadata[0]["dish_index"][]
+    @test eltype(dish_index) == Int32
+    @test size(dish_index) == (3, 2)
+    @test dish_index == [
+        -1 -1
+        42 53
+        43 54
+    ]
+end

test/test-read_chunked.jl

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
+@testset "Read ASDF file with chunked arrays" begin
+    asdf = ASDF.load_file("chunking.asdf")
+    println(YAML.write(asdf.metadata))
+
+    map_tree(output, asdf.metadata)
+
+    chunky = asdf.metadata["chunky"][]
+    @test eltype(chunky) == Float16
+    @test size(chunky) == (4, 4)
+    @test chunky == [
+        11 21 31 41
+        12 22 32 42
+        13 23 33 43
+        14 24 34 44
+    ]
+end

test/test-write.jl

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
+@testset "Write ASDF file" begin
+    dirname = mktempdir(; cleanup=true)
+    filename = joinpath(dirname, "output.asdf")
+
+    array = Float64[1/(i+j+k-2) for i in 1:50, j in 1:51, k in 1:52]
+    doc = Dict{Any,Any}(
+        "data1" => ASDF.NDArrayWrapper([1 2; 3 4]; inline=false),
+        "data2" => ASDF.NDArrayWrapper([1 2; 3 4]; inline=true),
+        "group" => Dict{Any,Any}(
+            "element1" => ASDF.NDArrayWrapper(array; compression=ASDF.C_None),
+            "element2" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Blosc),
+            "element3" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Bzip2),
+            "element4" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Lz4),
+            "element5" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Xz),
+            "element6" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Zlib),
+            "element7" => ASDF.NDArrayWrapper(array; compression=ASDF.C_Zstd),
+        ),
+    )
+    ASDF.write_file(filename, doc)
+
+    doc′ = ASDF.load_file(filename)
+    map_tree(output, doc′.metadata)
+
+    data1 = doc["data1"][]
+    data1′ = doc′.metadata["data1"][]
+    @test eltype(data1′) == eltype(data1)
+    @test size(data1′) == size(data1)
+    @test data1′ == data1
+
+    data2 = doc["data2"][]
+    data2′ = doc′.metadata["data2"][]
+    @test eltype(data2′) == eltype(data2)
+    @test size(data2′) == size(data2)
+    @test data2′ == data2
+
+    for n in 1:7
+        element = doc["group"]["element$n"][]
+        element′ = doc′.metadata["group"]["element$n"][]
+        @test eltype(element′) == eltype(element)
+        @test size(element′) == size(element)
+        @test element′ == element
+    end
+end

0 commit comments

Comments
 (0)