Datasets:
LEAP
/

ArXiv:
License:
File size: 1,053 Bytes
5dd4378
 
 
 
 
 
 
266a225
 
5dd4378
266a225
 
 
 
 
5dd4378
266a225
 
5dd4378
 
 
 
 
266a225
 
 
5dd4378
266a225
 
5dd4378
 
 
 
 
 
 
 
 
 
 
 
 
 
266a225
5dd4378
 
266a225
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
#!/bin/bash
#
#SBATCH --account=glab
#SBATCH -N 1
#SBATCH -c 24
#SBATCH --mem=100gb
#SBATCH --time=05-00:00
#
# Download a chunked ChaosBench dataset from Hugging Face, reassemble the
# chunks into a single tar.gz, extract it, and rename the result folder.
#
# Usage: sbatch <script> <dataset_name> [with_pigz]
#   $1 - dataset folder name on the Hub (also the output directory name)
#   $2 - optional; pass "with_pigz" to decompress in parallel with pigz
#
# NOTE: the shebang is bash (not sh) because the chunk loop relies on
# {a..z} brace expansion, which POSIX sh does not support.

set -u  # error on unset variables

# Require the dataset name argument; abort with a usage message otherwise.
: "${1:?Usage: $0 <dataset_name> [with_pigz]}"

# Base URL for the chunked files
BASE_URL="https://huggingface.co/datasets/LEAP/ChaosBench/resolve/main/$1/"

# The base name of the files (without extension)
BASE_NAME="$1_chunks"

# Extension for the chunked files
EXTENSION="tar.gz"

# Download all chunked files. Chunk suffixes run aa, ab, ..., zz (the
# default `split` naming); the first failed fetch is assumed to be one
# past the last chunk, so break out of both loops.
for prefix in {a..z}; do
    for suffix in {a..z}; do
        FILE_NAME="${BASE_NAME}.${EXTENSION}.${prefix}${suffix}"
        wget "${BASE_URL}${FILE_NAME}" || break 2
    done
done

# Combine the chunked files; the glob sorts lexicographically, which
# matches the aa..zz chunk order.
cat "${BASE_NAME}.${EXTENSION}".* > "$1.tar.gz"

# Remove the chunked files
rm "${BASE_NAME}.${EXTENSION}".*

# Extract the combined file
echo "EXTRACTING FOLDER, THIS MAY TAKE A WHILE..."
if [[ "${2:-}" == "with_pigz" ]]; then
    echo "Use pigz for parallel decompression..."
    pigz -dc "$1.tar.gz" | tar -xf -
else
    # Use standard tar for decompression
    tar -xzf "$1.tar.gz"
fi

# Remove the combined compressed file
rm "$1.tar.gz"

# Rename folder (the archive unpacks to <name>_tmp — presumably how the
# chunks were packed upstream; confirm against the dataset layout)
mv "$1_tmp" "$1"