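"""Standalone test runner for a trained diacritization model.

Loads the checkpoint referenced by ``test_model_path`` in the experiment
config, evaluates it on the test split, and prints loss, accuracy, and the
error rates reported by the trainer (WER, DER, WER*, DER*).
"""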
from .config_manager import ConfigManager

from torch import nn
from tqdm import trange

from dataset import load_iterators
from trainer import GeneralTrainer


class DiacritizationTester(GeneralTrainer):
    def __init__(self, config_path: str, model_kind: str) -> None:
        self.config_path = config_path
        self.model_kind = model_kind
        self.config_manager = ConfigManager(
            config_path=config_path, model_kind=model_kind
        )
        self.config = self.config_manager.config

        # Padded positions are excluded from the cross-entropy loss.
        self.pad_idx = 0
        self.criterion = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
        self.set_device()

        self.text_encoder = self.config_manager.text_encoder
        self.start_symbol_id = self.text_encoder.start_symbol_id

        # Build the model, move it to the target device, and restore the
        # checkpoint referenced by "test_model_path" in the config.
        self.model = self.config_manager.get_model()
        self.model = self.model.to(self.device)
        self.load_model(
            model_path=self.config["test_model_path"], load_optimizer=False
        )
        self.load_diacritizer()
        self.diacritizer.set_model(self.model)

        self.initialize_model()
        self.print_config()

    def run(self):
        # Only the test split is needed; skip loading train/validation data.
        self.config_manager.config["load_training_data"] = False
        self.config_manager.config["load_validation_data"] = False
        self.config_manager.config["load_test_data"] = True
        _, test_iterator, _ = load_iterators(self.config_manager)

        tqdm_eval = trange(0, len(test_iterator), leave=True)
        tqdm_error_rates = trange(0, len(test_iterator), leave=True)

        # One pass for loss/accuracy and one for diacritization error rates.
        loss, acc = self.evaluate(test_iterator, tqdm_eval, log=False)
        error_rates, _ = self.evaluate_with_error_rates(
            test_iterator, tqdm_error_rates, log=False
        )

        tqdm_eval.close()
        tqdm_error_rates.close()

        WER = error_rates["WER"]
        DER = error_rates["DER"]
        DER1 = error_rates["DER*"]
        WER1 = error_rates["WER*"]
        error_rates = f"DER: {DER}, WER: {WER}, DER*: {DER1}, WER*: {WER1}"

        print(f"global step: {self.global_step}")
        print(f"Evaluate {self.global_step}: accuracy: {acc}, loss: {loss}")
        print(f"WER/DER {self.global_step}: {error_rates}")