#!/bin/bash

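# Configuration: input data root, task-name prefixes, tokenizer checkpoint, and output location.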
BASE_DIR=../../data/seq_truncated
TASKS=("DST" "T2S" "SF" "RRR" "QCR" "NLI" "MRC" "MCQA" "DS" "DCRG" "ER" "ID" "DT" "CC" "CI" "ABSA")
# TASKS=("DT")
TOKENIZER_PATH="../../ckpts/t5-base"
OUTPUT_DIR=../../data

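# Collect every dataset directory under BASE_DIR whose name starts with "<TASK>-".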
DATASETS=()

for TASK in "${TASKS[@]}"; do
    for dataset in $(ls "${BASE_DIR}" | grep "^${TASK}-"); do
        DATASETS+=("${BASE_DIR}/${dataset}")
    done
done

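# Compute statistics over all collected dataset directories with the given tokenizer.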
python get_statistics.py \
    --input-dir-list "${DATASETS[@]}" \
    --tokenizer-path "${TOKENIZER_PATH}" \
    --output-path "${OUTPUT_DIR}"