Skip to content

Commit a0ad02b

Browse files
committed
Add CI job to test memory consumption on large files
1 parent d94a988 commit a0ad02b

File tree

2 files changed

+43
-0
lines changed

2 files changed

+43
-0
lines changed

.github/workflows/large-files.yml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
# CI workflow: verify that htar handles multi-gigabyte archives without
# materialising whole files in memory (the test script caps the RTS heap).
name: large-files
on:
- push
- pull_request

defaults:
  run:
    # Force bash even on non-Linux runners so the test script invocation
    # behaves consistently.
    shell: bash

jobs:
  build:
    runs-on: 'ubuntu-latest'
    steps:
    - uses: actions/checkout@v3
    - uses: haskell-actions/setup@v2
      # id is referenced below to locate the cabal store for caching.
      id: setup-haskell-cabal
      with:
        ghc-version: 'latest'
    - name: Update cabal package database
      run: cabal update
    - uses: actions/cache@v3
      name: Cache cabal stuff
      with:
        path: ${{ steps.setup-haskell-cabal.outputs.cabal-store }}
        # NOTE(review): a static key never invalidates; consider keying on
        # the cabal/freeze file hash if stale dependencies become a problem.
        key: large-files
    - name: Test
      run: htar/test-large-files.sh

htar/test-large-files.sh

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
#!/bin/sh
# Test that htar is capable of packing and unpacking large files
# without materialising them in full in memory.
#
# Strategy: create 5 x 2 GiB files of zeros, then run htar with the GHC
# runtime's heap limit (+RTS -M50M) set far below the file size, so any
# attempt to load a whole file into memory aborts the run.

set -eux
cabal build htar
HTAR=$(cabal list-bin htar)
# Work in a throwaway directory so repeated runs don't collide.
cd "$(mktemp -d)"
mkdir input
for i in $(seq 0 4); do
  dd if=/dev/zero of="input/$i.txt" bs=1M count=2048
done
# -s prints RTS statistics; -M50M caps the heap at 50 MB.
"$HTAR" --create --verbose --file input.tar.gz input +RTS -s -M50M
rm -rf input
"$HTAR" --extract --verbose --file input.tar.gz +RTS -s -M50M
# Sanity check: extraction recreated the input directory.
ls -l input

0 commit comments

Comments
 (0)