# linux.yml
name: HDF5/develop
on:
  pull_request:
    branches: [ develop ]
  push:
    branches: [ develop ]
    paths-ignore:
      - '**.md'
      - '**.txt'
      - 'docs/**'
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
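# The vol-cache job builds the full stack from source (HDF5 develop, Argobots,
# the async VOL, this cache VOL, and h5bench), then exercises the connector
# against node-local SSD, in-memory, and global storage configurations.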
jobs:
  vol-cache:
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4 # exact pinned tag garbled in source; v4 is an assumption
      - name: Dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y libtool
          git config pull.rebase false
          # hdf5
          git clone https://github.com/HDFGroup/hdf5.git
          # async vol
          git clone https://github.com/hpc-io/vol-async.git
          # Argobots
          git clone https://github.com/pmodels/argobots.git
          # h5bench
          git clone https://github.com/zhenghh04/h5bench.git
          # mpi
          sudo apt-get install -y libopenmpi-dev
          # zlib
          sudo apt-get install -y zlib1g-dev
          # python3
          sudo apt-get install -y python3
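      # Everything installs under $PWD/soft: HDF5 in soft/hdf5, the VOL
      # connectors in soft/hdf5/vol, Argobots in soft/argobots, and h5bench
      # in soft/h5bench.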
      - name: Installation
        run: |
          export mydir="$PWD"
          export EXAHDF5_ROOT=$mydir
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          mkdir -p $SDK_DIR
          export HDF5_ROOT=$SDK_DIR/hdf5/
          mkdir -p $HDF5_ROOT
          export HDF5_HOME=$HDF5_ROOT
          export HDF5_DIR=$HDF5_ROOT
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
          mkdir -p $HDF5_VOL_DIR
          mkdir -p $HDF5_VOL_DIR/lib/
          mkdir -p $HDF5_VOL_DIR/include/
          export ABT_DIR=$SDK_DIR/argobots/
          mkdir -p $ABT_DIR
          # Compile HDF5
          mkdir -p hdf5/build
          cd hdf5/build
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
          make -j2 install
          cd -
          # Compile Argobots
          cd argobots
          ./autogen.sh
          ./configure --prefix=$ABT_DIR
          make -j2 && make install
          cd -
          # Compile the asynchronous VOL connector
          mkdir -p vol-async/build
          cd vol-async/build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
          make -j2 all install
          cd -
          # Compile the cache VOL connector (this repository)
          mkdir -p build
          cd build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
          make -j2 all install
          cd -
          # Compile h5bench
          mkdir -p h5bench/build
          cd h5bench/build
          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
          make -j2 all install VERBOSE=1
          cd -
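      # The test steps below stack the connectors through HDF5_VOL_CONNECTOR:
      # the cache VOL (cache_ext) sits on the async VOL (connector value 512),
      # which sits on the native VOL (value 0).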
      - name: Test Vol-Cache-Node-Local
        run: |
          ulimit -d unlimited
          ulimit -s unlimited
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
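          # cache_1.cfg: SSD-type cache in ./SSD with node-local scope,
          # a ~120 GiB storage cap, and a 16 GiB write buffer.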
printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          # Generate the input dataset with the native VOL only
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
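          # Hypothetical variant: to stack the cache VOL directly on the native
          # VOL (no async layer), the connector string would drop under_vol=512:
          #   export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=0;under_info={}"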
          for opt in 'yes' 'no'
          do
            echo "Testing with caching $opt"
            HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
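      # Same test battery, but with the write cache staged in memory
      # (HDF5_CACHE_STORAGE_TYPE: MEMORY) and a smaller 2 GiB write buffer.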
      - name: Test Vol-Cache-MEMORY
        run: |
          ulimit -d unlimited
          ulimit -s unlimited
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          for opt in 'yes' 'no'
          do
            echo "Testing with caching $opt"
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 test_dataset.exe
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
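      # GLOBAL storage scope: the cache directory is treated as shared
      # storage visible to all ranks rather than per-node.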
      - name: Test Vol-Cache-Global
        run: |
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          for opt in 'yes' 'no'
          do
            echo "Testing with caching $opt"
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
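      # Write-fusion test: HDF5_CACHE_FUSION_THRESHOLD (16 MiB here) sets the
      # threshold below which small buffered writes are fused before flushing.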
      - name: Test Vol-Cache-Fusion
        run: |
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16